/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#ifndef ACCUMULATE_OUTGOING_ARGS
#define ACCUMULATE_OUTGOING_ARGS 0
#endif

/* Supply a default definition for PUSH_ARGS.  */
#ifndef PUSH_ARGS
#define PUSH_ARGS	!ACCUMULATE_OUTGOING_ARGS
#endif
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;

static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT, unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
					     int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
					 HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int fixed_type_p		PARAMS ((tree));
static rtx var_rtx		PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
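/* Illustrative sketch, added for exposition only: how a caller might consult
   the MOVE_BY_PIECES_P heuristic before expanding a fixed-size copy inline.
   The sizes below (a 32-byte copy at 32-bit alignment) are hypothetical; on a
   target with 4-byte words and the default MOVE_RATIO of 15, the copy costs
   move_by_pieces_ninsns (32, 32) == 8 simple moves, so it is done by pieces.  */
#if 0
static void
example_maybe_inline_copy (rtx dest, rtx src)
{
  unsigned HOST_WIDE_INT nbytes = 32;	/* hypothetical copy size */
  unsigned int align = 32;		/* hypothetical alignment, in bits */

  if (MOVE_BY_PIECES_P (nbytes, align))
    /* Cheap enough: expand as a handful of scalar moves.  */
    move_by_pieces (dest, src, nbytes, align);
  else
    /* Otherwise fall back to a movstr pattern or a library call.  */
    emit_block_move (dest, src, GEN_INT (nbytes), align);
}
#endif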
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

          MEM_COPY_ATTRIBUTES (new, x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));

              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }

          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          XEXP (new, 0) = copy_to_reg (XEXP (new, 0));
          return new;
        }

      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);

          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);

          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));

  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);

  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
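/* Illustrative sketch, added for exposition only: the expected protocol
   around the increment queue described above.  The registers and the
   4-byte increment are hypothetical; the point is the ordering of
   enqueue_insn, protect_from_queue and emit_queue.  */
#if 0
static void
example_queued_increment (void)
{
  rtx var = gen_reg_rtx (SImode);	/* hypothetical variable */
  rtx target = gen_reg_rtx (SImode);

  /* Queue "var += 4" to be emitted later; the QUEUED rtx stands for
     the pre-increment value of VAR.  */
  rtx queued = enqueue_insn (var, gen_add2_insn (var, GEN_INT (4)));

  /* Anything that might be QUEUED must go through protect_from_queue
     before being put into an instruction.  */
  emit_move_insn (target, protect_from_queue (queued, 0));

  /* Finally flush the pending increments.  */
  emit_queue ();
}
#endif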
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;
  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = gen_rtx_SUBREG (to_mode, from, 0);
      else
        to = gen_rtx_SUBREG (from_mode, to, 0);

      emit_move_insn (to, from);
      return;
    }
  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif
      libcall = (rtx) 0;
      if (from_mode == SFmode && to_mode == DFmode)
        libcall = extendsfdf2_libfunc;
      else if (from_mode == SFmode && to_mode == XFmode)
        libcall = extendsfxf2_libfunc;
      else if (from_mode == SFmode && to_mode == TFmode)
        libcall = extendsftf2_libfunc;
      else if (from_mode == DFmode && to_mode == SFmode)
        libcall = truncdfsf2_libfunc;
      else if (from_mode == DFmode && to_mode == XFmode)
        libcall = extenddfxf2_libfunc;
      else if (from_mode == DFmode && to_mode == TFmode)
        libcall = extenddftf2_libfunc;
      else if (from_mode == XFmode && to_mode == SFmode)
        libcall = truncxfsf2_libfunc;
      else if (from_mode == XFmode && to_mode == DFmode)
        libcall = truncxfdf2_libfunc;
      else if (from_mode == TFmode && to_mode == SFmode)
        libcall = trunctfsf2_libfunc;
      else if (from_mode == TFmode && to_mode == DFmode)
        libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (! unsignedp && HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
          if (unsignedp && HAVE_zero_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_zero_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
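/* Illustrative sketch, added for exposition only: widening a QImode value
   into an SImode register with the routines above.  The registers are
   hypothetical; the call pattern is the one convert_move itself expects
   (TO and FROM carrying their own modes).  */
#if 0
static rtx
example_widen_qi_to_si (rtx byte_val /* QImode */)
{
  rtx wide = gen_reg_rtx (SImode);

  /* Zero-extend: a nonzero UNSIGNEDP selects ZERO_EXTEND over SIGN_EXTEND.  */
  convert_move (wide, byte_val, 1);

  /* Equivalently, convert_to_mode returns a value in the requested mode,
     reusing BYTE_VAL in place when it can.  */
  return convert_to_mode (SImode, byte_val, 1);
}
#endif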
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        {
          to1 = gen_rtx_MEM (mode, data->to_addr);
          MEM_COPY_ATTRIBUTES (to1, data->to);
        }
      else
        to1 = change_address (data->to, mode,
                              plus_constant (data->to_addr, data->offset));

      if (data->autinc_from)
        {
          from1 = gen_rtx_MEM (mode, data->from_addr);
          MEM_COPY_ATTRIBUTES (from1, data->from);
        }
      else
        from1 = change_address (data->from, mode,
                                plus_constant (data->from_addr, data->offset));

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     unsigned int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];
          insn_operand_predicate_fn pred;

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                  || (*pred) (x, BLKmode))
              && ((pred = insn_data[(int) code].operand[1].predicate) == 0
                  || (*pred) (y, BLKmode))
              && ((pred = insn_data[(int) code].operand[3].predicate) == 0
                  || (*pred) (opalign, VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  volatile_ok = 0;
                  return 0;
                }
              else
                delete_insns_since (last);
            }
        }

      volatile_ok = 0;

      /* X, Y, or SIZE may have been passed through protect_from_queue.

         It is unsafe to save the value generated by protect_from_queue
         and reuse it later.  Consider what happens if emit_queue is
         called before the return value from protect_from_queue is used.

         Expansion of the CALL_EXPR below will call emit_queue before
         we are finished emitting RTL for argument setup.  So if we are
         not careful we could get the wrong value for an argument.

         To avoid this problem we go ahead and emit code to copy X, Y &
         SIZE into new pseudos.  We can then place those new pseudos
         into an RTL_EXPR and use them later, even after a call to
         emit_queue.

         Note this is not strictly needed for library calls since they
         do not call emit_queue before loading their arguments.  However,
         we may need to have library calls call emit_queue in the future
         since failing to do so could cause problems for targets which
         define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                              TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different conventions
         for returning pointers, we could end up generating incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
        {
          tree fntype;

          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          fn = get_identifier ("memcpy");
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          fn = build_decl (FUNCTION_DECL, fn, fntype);
          ggc_add_tree_root (&fn, 1);
          DECL_EXTERNAL (fn) = 1;
          TREE_PUBLIC (fn) = 1;
          DECL_ARTIFICIAL (fn) = 1;
          make_decl_rtl (fn, NULL_PTR);
          assemble_external (fn);
        }

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
      arg_list
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
                         VOIDmode, 3, y, Pmode, x, Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
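/* Illustrative note, added for exposition only: the CALL_EXPR built above is
   the tree equivalent of the C call

       memcpy ((void *) x, (void *) y, (size_t) size);

   expanded through the normal call path so that the value memcpy returns
   stays available to the caller, unlike a bare libcall sequence.  */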
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
                      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC in bits.  */

/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be necessary.  */

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));
          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && align >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i],
                          change_address (src, mode,
                                          plus_constant (XEXP (src, 0),
                                                         bytepos)));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          if (bytepos == 0
              && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
            tmps[i] = XEXP (src, 0);
          else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
                   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
            tmps[i] = XEXP (src, 1);
          else
            abort ();
        }
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, align, ssize);

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
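/* Illustrative note, added for exposition only: the shape of a PARALLEL that
   emit_group_load consumes.  The register numbers and the 16-byte structure
   below are hypothetical.  A 16-byte BLKmode value returned in two DImode
   registers r4 and r5 would be described as

       (parallel [(expr_list (reg:DI 4) (const_int 0))
                  (expr_list (reg:DI 5) (const_int 8))])

   i.e. each element pairs a destination register with its byte offset within
   the source block, which is exactly what the loop above walks.  */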
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
          && align >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (change_address (dst, mode,
                                        plus_constant (XEXP (dst, 0),
                                                       bytepos)),
                        tmps[i]);
      else
        store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], align, ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          bitsize, BITS_PER_WORD),
                       bitsize, BITS_PER_WORD);
    }

  return tgtblk;
}
2210 /* Add a USE expression for REG to the (possibly empty) list pointed
2211 to by CALL_FUSAGE. REG must denote a hard register. */
2214 use_reg (call_fusage
, reg
)
2215 rtx
*call_fusage
, reg
;
2217 if (GET_CODE (reg
) != REG
2218 || REGNO (reg
) >= FIRST_PSEUDO_REGISTER
)
2222 = gen_rtx_EXPR_LIST (VOIDmode
,
2223 gen_rtx_USE (VOIDmode
, reg
), *call_fusage
);
2226 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2227 starting at REGNO. All of these registers must be hard registers. */
2230 use_regs (call_fusage
, regno
, nregs
)
2237 if (regno
+ nregs
> FIRST_PSEUDO_REGISTER
)
2240 for (i
= 0; i
< nregs
; i
++)
2241 use_reg (call_fusage
, gen_rtx_REG (reg_raw_mode
[regno
+ i
], regno
+ i
));
2244 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2245 PARALLEL REGS. This is for calls that pass values in multiple
2246 non-contiguous locations. The Irix 6 ABI has examples of this. */
2249 use_group_regs (call_fusage
, regs
)
2255 for (i
= 0; i
< XVECLEN (regs
, 0); i
++)
2257 rtx reg
= XEXP (XVECEXP (regs
, 0, i
), 0);
2259 /* A NULL entry means the parameter goes both on the stack and in
2260 registers. This can also be a MEM for targets that pass values
2261 partially on the stack and partially in registers. */
2262 if (reg
!= 0 && GET_CODE (reg
) == REG
)
2263 use_reg (call_fusage
, reg
);
2269 can_store_by_pieces (len
, constfun
, constfundata
, align
)
2270 unsigned HOST_WIDE_INT len
;
2271 rtx (*constfun
) PARAMS ((PTR
, HOST_WIDE_INT
, enum machine_mode
));
2275 unsigned HOST_WIDE_INT max_size
, l
;
2276 HOST_WIDE_INT offset
= 0;
2277 enum machine_mode mode
, tmode
;
2278 enum insn_code icode
;
2282 if (! MOVE_BY_PIECES_P (len
, align
))
2285 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2286 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2287 align
= MOVE_MAX
* BITS_PER_UNIT
;
2289 /* We would first store what we can in the largest integer mode, then go to
2290 successively smaller modes. */
2293 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2298 max_size
= MOVE_MAX_PIECES
+ 1;
2299 while (max_size
> 1)
2301 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2302 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2303 if (GET_MODE_SIZE (tmode
) < max_size
)
2306 if (mode
== VOIDmode
)
2309 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2310 if (icode
!= CODE_FOR_nothing
2311 && align
>= GET_MODE_ALIGNMENT (mode
))
2313 unsigned int size
= GET_MODE_SIZE (mode
);
2320 cst
= (*constfun
) (constfundata
, offset
, mode
);
2321 if (!LEGITIMATE_CONSTANT_P (cst
))
2331 max_size
= GET_MODE_SIZE (mode
);
2334 /* The code above should have handled everything. */
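/* Illustrative sketch, not part of GCC: the same widest-first strategy in
   plain C.  The chunk sizes stand in for the integer modes walked above and
   are only assumed values.  */
#if 0
static unsigned int
count_pieces (unsigned long len)
{
  static const unsigned int sizes[] = { 8, 4, 2, 1 };	/* widest "mode" first */
  unsigned int i, ninsns = 0;

  for (i = 0; i < sizeof sizes / sizeof sizes[0]; i++)
    while (len >= sizes[i])
      {
	len -= sizes[i];	/* one store of sizes[i] bytes */
	ninsns++;
      }
  return ninsns;
}
#endif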
2342 /* Generate several move instructions to store LEN bytes generated by
2343 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2344 pointer which will be passed as argument in every CONSTFUN call.
2345 ALIGN is maximum alignment we can assume. */
2348 store_by_pieces (to
, len
, constfun
, constfundata
, align
)
2350 unsigned HOST_WIDE_INT len
;
2351 rtx (*constfun
) PARAMS ((PTR
, HOST_WIDE_INT
, enum machine_mode
));
2355 struct store_by_pieces data
;
2357 if (! MOVE_BY_PIECES_P (len
, align
))
2359 to
= protect_from_queue (to
, 1);
2360 data
.constfun
= constfun
;
2361 data
.constfundata
= constfundata
;
2364 store_by_pieces_1 (&data
, align
);
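/* Illustrative sketch, not part of GCC: the CONSTFUN/CONSTFUNDATA callback
   pattern restated with plain bytes instead of rtx values.  The names below
   are invented for the example; clear_by_pieces_1 further down plays the
   role of such a callback and simply answers zero.  */
#if 0
typedef unsigned char (*piece_fn) (void *data, long offset);

static unsigned char
byte_from_string (void *data, long offset)
{
  return ((const unsigned char *) data)[offset];
}

static void
fill_block (unsigned char *to, long len, piece_fn constfun, void *constfundata)
{
  long i;

  for (i = 0; i < len; i++)
    to[i] = constfun (constfundata, i);	/* ask the callback for each piece */
}
#endif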
2367 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2368 rtx with BLKmode). The caller must pass TO through protect_from_queue
2369 before calling. ALIGN is maximum alignment we can assume. */
2372 clear_by_pieces (to
, len
, align
)
2374 unsigned HOST_WIDE_INT len
;
2377 struct store_by_pieces data
;
2379 data
.constfun
= clear_by_pieces_1
;
2380 data
.constfundata
= NULL_PTR
;
2383 store_by_pieces_1 (&data
, align
);
2386 /* Callback routine for clear_by_pieces.
2387 Return const0_rtx unconditionally. */
2390 clear_by_pieces_1 (data
, offset
, mode
)
2391 PTR data ATTRIBUTE_UNUSED
;
2392 HOST_WIDE_INT offset ATTRIBUTE_UNUSED
;
2393 enum machine_mode mode ATTRIBUTE_UNUSED
;
2398 /* Subroutine of clear_by_pieces and store_by_pieces.
2399 Generate several move instructions to store LEN bytes of block TO. (A MEM
2400 rtx with BLKmode). The caller must pass TO through protect_from_queue
2401 before calling. ALIGN is maximum alignment we can assume. */
2404 store_by_pieces_1 (data
, align
)
2405 struct store_by_pieces
*data
;
2408 rtx to_addr
= XEXP (data
->to
, 0);
2409 unsigned HOST_WIDE_INT max_size
= MOVE_MAX_PIECES
+ 1;
2410 enum machine_mode mode
= VOIDmode
, tmode
;
2411 enum insn_code icode
;
2414 data
->to_addr
= to_addr
;
2416 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2417 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2419 data
->explicit_inc_to
= 0;
2421 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2423 data
->offset
= data
->len
;
2425 /* If storing requires more than two move insns,
2426 copy addresses to registers (to make displacements shorter)
2427 and use post-increment if available. */
2428 if (!data
->autinc_to
2429 && move_by_pieces_ninsns (data
->len
, align
) > 2)
2431 /* Determine the main mode we'll be using. */
2432 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2433 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2434 if (GET_MODE_SIZE (tmode
) < max_size
)
2437 if (USE_STORE_PRE_DECREMENT (mode
) && data
->reverse
&& ! data
->autinc_to
)
2439 data
->to_addr
= copy_addr_to_reg (plus_constant (to_addr
, data
->len
));
2440 data
->autinc_to
= 1;
2441 data
->explicit_inc_to
= -1;
2444 if (USE_STORE_POST_INCREMENT (mode
) && ! data
->reverse
2445 && ! data
->autinc_to
)
2447 data
->to_addr
= copy_addr_to_reg (to_addr
);
2448 data
->autinc_to
= 1;
2449 data
->explicit_inc_to
= 1;
2452 if ( !data
->autinc_to
&& CONSTANT_P (to_addr
))
2453 data
->to_addr
= copy_addr_to_reg (to_addr
);
2456 if (! SLOW_UNALIGNED_ACCESS (word_mode
, align
)
2457 || align
> MOVE_MAX
* BITS_PER_UNIT
|| align
>= BIGGEST_ALIGNMENT
)
2458 align
= MOVE_MAX
* BITS_PER_UNIT
;
2460 /* First store what we can in the largest integer mode, then go to
2461 successively smaller modes. */
2463 while (max_size
> 1)
2465 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2466 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
2467 if (GET_MODE_SIZE (tmode
) < max_size
)
2470 if (mode
== VOIDmode
)
2473 icode
= mov_optab
->handlers
[(int) mode
].insn_code
;
2474 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2475 store_by_pieces_2 (GEN_FCN (icode
), mode
, data
);
2477 max_size
= GET_MODE_SIZE (mode
);
2480 /* The code above should have handled everything. */
2485 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2486 with move instructions for mode MODE. GENFUN is the gen_... function
2487 to make a move insn for that mode. DATA has all the other info. */
2490 store_by_pieces_2 (genfun
, mode
, data
)
2491 rtx (*genfun
) PARAMS ((rtx
, ...));
2492 enum machine_mode mode
;
2493 struct store_by_pieces
*data
;
2495 unsigned int size
= GET_MODE_SIZE (mode
);
2498 while (data
->len
>= size
)
2501 data
->offset
-= size
;
2503 if (data
->autinc_to
)
2505 to1
= gen_rtx_MEM (mode
, data
->to_addr
);
2506 MEM_COPY_ATTRIBUTES (to1
, data
->to
);
2509 to1
= change_address (data
->to
, mode
,
2510 plus_constant (data
->to_addr
, data
->offset
));
2512 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2513 emit_insn (gen_add2_insn (data
->to_addr
,
2514 GEN_INT (-(HOST_WIDE_INT
) size
)));
2516 cst
= (*data
->constfun
) (data
->constfundata
, data
->offset
, mode
);
2517 emit_insn ((*genfun
) (to1
, cst
));
2519 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2520 emit_insn (gen_add2_insn (data
->to_addr
, GEN_INT (size
)));
2522 if (! data
->reverse
)
2523 data
->offset
+= size
;
2529 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2530 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2532 If we call a function that returns the length of the block, return it. */
2535 clear_storage (object
, size
, align
)
2540 #ifdef TARGET_MEM_FUNCTIONS
2542 tree call_expr
, arg_list
;
2546 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2547 just move a zero. Otherwise, do this a piece at a time. */
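/* Illustrative sketch, not part of GCC, showing the same split at the C
   level: a scalar whose mode covers the whole object is cleared with a
   single store, while a BLKmode object needs a block clear.  */
#if 0
#include <string.h>

struct blob { char bytes[64]; };

static void
clear_examples (int *scalar, struct blob *block)
{
  *scalar = 0;			     /* single move of CONST0_RTX */
  memset (block, 0, sizeof *block);  /* cleared piecewise or by a call */
}
#endif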
2548 if (GET_MODE (object
) != BLKmode
2549 && GET_CODE (size
) == CONST_INT
2550 && GET_MODE_SIZE (GET_MODE (object
)) == (unsigned int) INTVAL (size
))
2551 emit_move_insn (object
, CONST0_RTX (GET_MODE (object
)));
2554 object
= protect_from_queue (object
, 1);
2555 size
= protect_from_queue (size
, 0);
2557 if (GET_CODE (size
) == CONST_INT
2558 && MOVE_BY_PIECES_P (INTVAL (size
), align
))
2559 clear_by_pieces (object
, INTVAL (size
), align
);
2562 /* Try the most limited insn first, because there's no point
2563 including more than one in the machine description unless
2564 the more limited one has some advantage. */
2566 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
2567 enum machine_mode mode
;
2569 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2570 mode
= GET_MODE_WIDER_MODE (mode
))
2572 enum insn_code code
= clrstr_optab
[(int) mode
];
2573 insn_operand_predicate_fn pred
;
2575 if (code
!= CODE_FOR_nothing
2576 /* We don't need MODE to be narrower than
2577 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2578 the mode mask, as it is returned by the macro, it will
2579 definitely be less than the actual mode mask. */
2580 && ((GET_CODE (size
) == CONST_INT
2581 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2582 <= (GET_MODE_MASK (mode
) >> 1)))
2583 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
2584 && ((pred
= insn_data
[(int) code
].operand
[0].predicate
) == 0
2585 || (*pred
) (object
, BLKmode
))
2586 && ((pred
= insn_data
[(int) code
].operand
[2].predicate
) == 0
2587 || (*pred
) (opalign
, VOIDmode
)))
2590 rtx last
= get_last_insn ();
2593 op1
= convert_to_mode (mode
, size
, 1);
2594 pred
= insn_data
[(int) code
].operand
[1].predicate
;
2595 if (pred
!= 0 && ! (*pred
) (op1
, mode
))
2596 op1
= copy_to_mode_reg (mode
, op1
);
2598 pat
= GEN_FCN ((int) code
) (object
, op1
, opalign
);
2605 delete_insns_since (last
);
2609 /* OBJECT or SIZE may have been passed through protect_from_queue.
2611 It is unsafe to save the value generated by protect_from_queue
2612 and reuse it later. Consider what happens if emit_queue is
2613 called before the return value from protect_from_queue is used.
2615 Expansion of the CALL_EXPR below will call emit_queue before
2616 we are finished emitting RTL for argument setup. So if we are
2617 not careful we could get the wrong value for an argument.
2619 To avoid this problem we go ahead and emit code to copy OBJECT
2620 and SIZE into new pseudos. We can then place those new pseudos
2621 into an RTL_EXPR and use them later, even after a call to
2624 Note this is not strictly needed for library calls since they
2625 do not call emit_queue before loading their arguments. However,
2626 we may need to have library calls call emit_queue in the future
2627 since failing to do so could cause problems for targets which
2628 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2629 object
= copy_to_mode_reg (Pmode
, XEXP (object
, 0));
2631 #ifdef TARGET_MEM_FUNCTIONS
2632 size
= copy_to_mode_reg (TYPE_MODE (sizetype
), size
);
2634 size
= convert_to_mode (TYPE_MODE (integer_type_node
), size
,
2635 TREE_UNSIGNED (integer_type_node
));
2636 size
= copy_to_mode_reg (TYPE_MODE (integer_type_node
), size
);
2639 #ifdef TARGET_MEM_FUNCTIONS
2640 /* It is incorrect to use the libcall calling conventions to call
2641 memset in this context.
2643 This could be a user call to memset and the user may wish to
2644 examine the return value from memset.
2646 For targets where libcalls and normal calls have different
2647 conventions for returning pointers, we could end up generating
2650 So instead of using a libcall sequence we build up a suitable
2651 CALL_EXPR and expand the call in the normal fashion. */
2652 if (fn
== NULL_TREE
)
2656 /* This was copied from except.c, I don't know if all this is
2657 necessary in this context or not. */
2658 fn
= get_identifier ("memset");
2659 fntype
= build_pointer_type (void_type_node
);
2660 fntype
= build_function_type (fntype
, NULL_TREE
);
2661 fn
= build_decl (FUNCTION_DECL
, fn
, fntype
);
2662 ggc_add_tree_root (&fn
, 1);
2663 DECL_EXTERNAL (fn
) = 1;
2664 TREE_PUBLIC (fn
) = 1;
2665 DECL_ARTIFICIAL (fn
) = 1;
2666 make_decl_rtl (fn
, NULL_PTR
);
2667 assemble_external (fn
);
2670 /* We need to make an argument list for the function call.
2672 memset has three arguments, the first is a void * address, the
2673 second an integer with the initialization value, the last is a
2674 size_t byte count for the copy. */
2676 = build_tree_list (NULL_TREE
,
2677 make_tree (build_pointer_type (void_type_node
),
2679 TREE_CHAIN (arg_list
)
2680 = build_tree_list (NULL_TREE
,
2681 make_tree (integer_type_node
, const0_rtx
));
2682 TREE_CHAIN (TREE_CHAIN (arg_list
))
2683 = build_tree_list (NULL_TREE
, make_tree (sizetype
, size
));
2684 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list
))) = NULL_TREE
;
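/* The argument list built above corresponds to the ordinary call
       memset (<address of OBJECT>, 0, <SIZE>);
   with the zero supplied as const0_rtx and the count already in sizetype.  */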
2686 /* Now we have to build up the CALL_EXPR itself. */
2687 call_expr
= build1 (ADDR_EXPR
,
2688 build_pointer_type (TREE_TYPE (fn
)), fn
);
2689 call_expr
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fn
)),
2690 call_expr
, arg_list
, NULL_TREE
);
2691 TREE_SIDE_EFFECTS (call_expr
) = 1;
2693 retval
= expand_expr (call_expr
, NULL_RTX
, VOIDmode
, 0);
2695 emit_library_call (bzero_libfunc
, LCT_NORMAL
,
2696 VOIDmode
, 2, object
, Pmode
, size
,
2697 TYPE_MODE (integer_type_node
));
2705 /* Generate code to copy Y into X.
2706 Both Y and X must have the same mode, except that
2707 Y can be a constant with VOIDmode.
2708 This mode cannot be BLKmode; use emit_block_move for that.
2710 Return the last instruction emitted. */
2713 emit_move_insn (x
, y
)
2716 enum machine_mode mode
= GET_MODE (x
);
2717 rtx y_cst
= NULL_RTX
;
2720 x
= protect_from_queue (x
, 1);
2721 y
= protect_from_queue (y
, 0);
2723 if (mode
== BLKmode
|| (GET_MODE (y
) != mode
&& GET_MODE (y
) != VOIDmode
))
2726 /* Never force constant_p_rtx to memory. */
2727 if (GET_CODE (y
) == CONSTANT_P_RTX
)
2729 else if (CONSTANT_P (y
) && ! LEGITIMATE_CONSTANT_P (y
))
2732 y
= force_const_mem (mode
, y
);
2735 /* If X or Y are memory references, verify that their addresses are valid
2737 if (GET_CODE (x
) == MEM
2738 && ((! memory_address_p (GET_MODE (x
), XEXP (x
, 0))
2739 && ! push_operand (x
, GET_MODE (x
)))
2741 && CONSTANT_ADDRESS_P (XEXP (x
, 0)))))
2742 x
= change_address (x
, VOIDmode
, XEXP (x
, 0));
2744 if (GET_CODE (y
) == MEM
2745 && (! memory_address_p (GET_MODE (y
), XEXP (y
, 0))
2747 && CONSTANT_ADDRESS_P (XEXP (y
, 0)))))
2748 y
= change_address (y
, VOIDmode
, XEXP (y
, 0));
2750 if (mode
== BLKmode
)
2753 last_insn
= emit_move_insn_1 (x
, y
);
2755 if (y_cst
&& GET_CODE (x
) == REG
)
2756 REG_NOTES (last_insn
)
2757 = gen_rtx_EXPR_LIST (REG_EQUAL
, y_cst
, REG_NOTES (last_insn
));
2762 /* Low level part of emit_move_insn.
2763 Called just like emit_move_insn, but assumes X and Y
2764 are basically valid. */
2767 emit_move_insn_1 (x
, y
)
2770 enum machine_mode mode
= GET_MODE (x
);
2771 enum machine_mode submode
;
2772 enum mode_class
class = GET_MODE_CLASS (mode
);
2775 if ((unsigned int) mode
>= (unsigned int) MAX_MACHINE_MODE
)
2778 if (mov_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2780 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) mode
].insn_code
) (x
, y
));
2782 /* Expand complex moves by moving real part and imag part, if possible. */
2783 else if ((class == MODE_COMPLEX_FLOAT
|| class == MODE_COMPLEX_INT
)
2784 && BLKmode
!= (submode
= mode_for_size ((GET_MODE_UNIT_SIZE (mode
)
2786 (class == MODE_COMPLEX_INT
2787 ? MODE_INT
: MODE_FLOAT
),
2789 && (mov_optab
->handlers
[(int) submode
].insn_code
2790 != CODE_FOR_nothing
))
2792 /* Don't split destination if it is a stack push. */
2793 int stack
= push_operand (x
, GET_MODE (x
));
2795 /* If this is a stack, push the highpart first, so it
2796 will be in the argument order.
2798 In that case, change_address is used only to convert
2799 the mode, not to change the address. */
2802 /* Note that the real part always precedes the imag part in memory
2803 regardless of machine's endianness. */
2804 #ifdef STACK_GROWS_DOWNWARD
2805 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2806 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2807 gen_imagpart (submode
, y
)));
2808 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2809 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2810 gen_realpart (submode
, y
)));
2812 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2813 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2814 gen_realpart (submode
, y
)));
2815 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2816 (gen_rtx_MEM (submode
, XEXP (x
, 0)),
2817 gen_imagpart (submode
, y
)));
2822 rtx realpart_x
, realpart_y
;
2823 rtx imagpart_x
, imagpart_y
;
2825 /* If this is a complex value with each part being smaller than a
2826 word, the usual calling sequence will likely pack the pieces into
2827 a single register. Unfortunately, SUBREG of hard registers only
2828 deals in terms of words, so we have a problem converting input
2829 arguments to the CONCAT of two registers that is used elsewhere
2830 for complex values. If this is before reload, we can copy it into
2831 memory and reload. FIXME, we should see about using extract and
2832 insert on integer registers, but complex short and complex char
2833 variables should be rarely used. */
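/* Concretely (word size assumed for illustration): a __complex__ short on a
   target with 32-bit words packs its two 16-bit parts into a single word, so
   an incoming argument arrives in one hard register, while gen_realpart and
   gen_imagpart expect the two-register CONCAT form; the code below therefore
   takes the detour through a stack temporary.  */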
2834 if (GET_MODE_BITSIZE (mode
) < 2 * BITS_PER_WORD
2835 && (reload_in_progress
| reload_completed
) == 0)
2837 int packed_dest_p
= (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
);
2838 int packed_src_p
= (REG_P (y
) && REGNO (y
) < FIRST_PSEUDO_REGISTER
);
2840 if (packed_dest_p
|| packed_src_p
)
2842 enum mode_class reg_class
= ((class == MODE_COMPLEX_FLOAT
)
2843 ? MODE_FLOAT
: MODE_INT
);
2845 enum machine_mode reg_mode
2846 = mode_for_size (GET_MODE_BITSIZE (mode
), reg_class
, 1);
2848 if (reg_mode
!= BLKmode
)
2850 rtx mem
= assign_stack_temp (reg_mode
,
2851 GET_MODE_SIZE (mode
), 0);
2852 rtx cmem
= change_address (mem
, mode
, NULL_RTX
);
2855 = N_("function using short complex types cannot be inline");
2859 rtx sreg
= gen_rtx_SUBREG (reg_mode
, x
, 0);
2860 emit_move_insn_1 (cmem
, y
);
2861 return emit_move_insn_1 (sreg
, mem
);
2865 rtx sreg
= gen_rtx_SUBREG (reg_mode
, y
, 0);
2866 emit_move_insn_1 (mem
, sreg
);
2867 return emit_move_insn_1 (x
, cmem
);
2873 realpart_x
= gen_realpart (submode
, x
);
2874 realpart_y
= gen_realpart (submode
, y
);
2875 imagpart_x
= gen_imagpart (submode
, x
);
2876 imagpart_y
= gen_imagpart (submode
, y
);
2878 /* Show the output dies here. This is necessary for SUBREGs
2879 of pseudos since we cannot track their lifetimes correctly;
2880 hard regs shouldn't appear here except as return values.
2881 We never want to emit such a clobber after reload. */
2883 && ! (reload_in_progress
|| reload_completed
)
2884 && (GET_CODE (realpart_x
) == SUBREG
2885 || GET_CODE (imagpart_x
) == SUBREG
))
2887 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2890 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2891 (realpart_x
, realpart_y
));
2892 emit_insn (GEN_FCN (mov_optab
->handlers
[(int) submode
].insn_code
)
2893 (imagpart_x
, imagpart_y
));
2896 return get_last_insn ();
2899 /* This will handle any multi-word mode that lacks a move_insn pattern.
2900 However, you will get better code if you define such patterns,
2901 even if they must turn into multiple assembler instructions. */
2902 else if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2908 #ifdef PUSH_ROUNDING
2910 /* If X is a push on the stack, do the push now and replace
2911 X with a reference to the stack pointer. */
2912 if (push_operand (x
, GET_MODE (x
)))
2914 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x
))));
2915 x
= change_address (x
, VOIDmode
, stack_pointer_rtx
);
2919 /* If we are in reload, see if either operand is a MEM whose address
2920 is scheduled for replacement. */
2921 if (reload_in_progress
&& GET_CODE (x
) == MEM
2922 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
2924 rtx
new = gen_rtx_MEM (GET_MODE (x
), inner
);
2926 MEM_COPY_ATTRIBUTES (new, x
);
2929 if (reload_in_progress
&& GET_CODE (y
) == MEM
2930 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
2932 rtx
new = gen_rtx_MEM (GET_MODE (y
), inner
);
2934 MEM_COPY_ATTRIBUTES (new, y
);
2942 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
2945 rtx xpart
= operand_subword (x
, i
, 1, mode
);
2946 rtx ypart
= operand_subword (y
, i
, 1, mode
);
2948 /* If we can't get a part of Y, put Y into memory if it is a
2949 constant. Otherwise, force it into a register. If we still
2950 can't get a part of Y, abort. */
2951 if (ypart
== 0 && CONSTANT_P (y
))
2953 y
= force_const_mem (mode
, y
);
2954 ypart
= operand_subword (y
, i
, 1, mode
);
2956 else if (ypart
== 0)
2957 ypart
= operand_subword_force (y
, i
, mode
);
2959 if (xpart
== 0 || ypart
== 0)
2962 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
2964 last_insn
= emit_move_insn (xpart
, ypart
);
2967 seq
= gen_sequence ();
2970 /* Show the output dies here. This is necessary for SUBREGs
2971 of pseudos since we cannot track their lifetimes correctly;
2972 hard regs shouldn't appear here except as return values.
2973 We never want to emit such a clobber after reload. */
2975 && ! (reload_in_progress
|| reload_completed
)
2976 && need_clobber
!= 0)
2978 emit_insn (gen_rtx_CLOBBER (VOIDmode
, x
));
2989 /* Pushing data onto the stack. */
2991 /* Push a block of length SIZE (perhaps variable)
2992 and return an rtx to address the beginning of the block.
2993 Note that it is not possible for the value returned to be a QUEUED.
2994 The value may be virtual_outgoing_args_rtx.
2996 EXTRA is the number of bytes of padding to push in addition to SIZE.
2997 BELOW nonzero means this padding comes at low addresses;
2998 otherwise, the padding comes at high addresses. */
3001 push_block (size
, extra
, below
)
3007 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3008 if (CONSTANT_P (size
))
3009 anti_adjust_stack (plus_constant (size
, extra
));
3010 else if (GET_CODE (size
) == REG
&& extra
== 0)
3011 anti_adjust_stack (size
);
3014 temp
= copy_to_mode_reg (Pmode
, size
);
3016 temp
= expand_binop (Pmode
, add_optab
, temp
, GEN_INT (extra
),
3017 temp
, 0, OPTAB_LIB_WIDEN
);
3018 anti_adjust_stack (temp
);
3021 #ifndef STACK_GROWS_DOWNWARD
3022 #ifdef ARGS_GROW_DOWNWARD
3023 if (!ACCUMULATE_OUTGOING_ARGS
)
3031 /* Return the lowest stack address when STACK or ARGS grow downward and
3032 we are not accumulating outgoing arguments (the c4x port uses such conventions). */
3034 temp
= virtual_outgoing_args_rtx
;
3035 if (extra
!= 0 && below
)
3036 temp
= plus_constant (temp
, extra
);
3040 if (GET_CODE (size
) == CONST_INT
)
3041 temp
= plus_constant (virtual_outgoing_args_rtx
,
3042 -INTVAL (size
) - (below
? 0 : extra
));
3043 else if (extra
!= 0 && !below
)
3044 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3045 negate_rtx (Pmode
, plus_constant (size
, extra
)));
3047 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3048 negate_rtx (Pmode
, size
));
3051 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
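/* Worked example (constants assumed for illustration): with SIZE == 16 and
   EXTRA == 4, the block returned here begins 20 bytes below
   virtual_outgoing_args_rtx when the padding goes above it (BELOW == 0), and
   only 16 bytes below when the padding goes beneath it (BELOW != 0),
   matching the -INTVAL (size) - (below ? 0 : extra) offset above.  */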
3057 return gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
3060 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
3061 block of SIZE bytes. */
3064 get_push_address (size
)
3069 if (STACK_PUSH_CODE
== POST_DEC
)
3070 temp
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
, GEN_INT (size
));
3071 else if (STACK_PUSH_CODE
== POST_INC
)
3072 temp
= gen_rtx_MINUS (Pmode
, stack_pointer_rtx
, GEN_INT (size
));
3074 temp
= stack_pointer_rtx
;
3076 return copy_to_reg (temp
);
3079 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3081 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3083 SIZE is an rtx for the size of data to be copied (in bytes),
3084 needed only if X is BLKmode.
3086 ALIGN (in bits) is maximum alignment we can assume.
3088 If PARTIAL and REG are both nonzero, then copy that many of the first
3089 words of X into registers starting with REG, and push the rest of X.
3090 The amount of space pushed is decreased by PARTIAL words,
3091 rounded *down* to a multiple of PARM_BOUNDARY.
3092 REG must be a hard register in this case.
3093 If REG is zero but PARTIAL is not, take all other actions for an
3094 argument partially in registers, but do not actually load any registers.
3097 EXTRA is the amount in bytes of extra space to leave next to this arg.
3098 This is ignored if an argument block has already been allocated.
3100 On a machine that lacks real push insns, ARGS_ADDR is the address of
3101 the bottom of the argument block for this call. We use indexing off there
3102 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3103 argument block has not been preallocated.
3105 ARGS_SO_FAR is the size of args previously pushed for this call.
3107 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3108 for arguments passed in registers. If nonzero, it will be the number
3109 of bytes required. */
3112 emit_push_insn (x
, mode
, type
, size
, align
, partial
, reg
, extra
,
3113 args_addr
, args_so_far
, reg_parm_stack_space
,
3116 enum machine_mode mode
;
3125 int reg_parm_stack_space
;
3129 enum direction stack_direction
3130 #ifdef STACK_GROWS_DOWNWARD
3136 /* Decide where to pad the argument: `downward' for below,
3137 `upward' for above, or `none' for don't pad it.
3138 Default is below for small data on big-endian machines; else above. */
3139 enum direction where_pad
= FUNCTION_ARG_PADDING (mode
, type
);
3141 /* Invert direction if stack is post-update. */
3142 if (STACK_PUSH_CODE
== POST_INC
|| STACK_PUSH_CODE
== POST_DEC
)
3143 if (where_pad
!= none
)
3144 where_pad
= (where_pad
== downward
? upward
: downward
);
3146 xinner
= x
= protect_from_queue (x
, 0);
3148 if (mode
== BLKmode
)
3150 /* Copy a block into the stack, entirely or partially. */
3153 int used
= partial
* UNITS_PER_WORD
;
3154 int offset
= used
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3162 /* USED is now the # of bytes we need not copy to the stack
3163 because registers will take care of them. */
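/* For instance (parameters assumed for illustration): with PARTIAL == 3,
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 bits, USED starts at 12 bytes
   and OFFSET == 12 % 8 == 4, the odd part that does not fill a whole
   PARM_BOUNDARY unit.  */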
3166 xinner
= change_address (xinner
, BLKmode
,
3167 plus_constant (XEXP (xinner
, 0), used
));
3169 /* If the partial register-part of the arg counts in its stack size,
3170 skip the part of stack space corresponding to the registers.
3171 Otherwise, start copying to the beginning of the stack space,
3172 by setting SKIP to 0. */
3173 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
3175 #ifdef PUSH_ROUNDING
3176 /* Do it with several push insns if that doesn't take lots of insns
3177 and if there is no difficulty with push insns that skip bytes
3178 on the stack for alignment purposes. */
3181 && GET_CODE (size
) == CONST_INT
3183 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size
) - used
, align
))
3184 /* Here we avoid the case of a structure whose weak alignment
3185 forces many pushes of a small amount of data,
3186 and such small pushes do rounding that causes trouble. */
3187 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
3188 || align
>= BIGGEST_ALIGNMENT
3189 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
3190 == (align
/ BITS_PER_UNIT
)))
3191 && PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
3193 /* Push padding now if padding above and stack grows down,
3194 or if padding below and stack grows up.
3195 But if space already allocated, this has already been done. */
3196 if (extra
&& args_addr
== 0
3197 && where_pad
!= none
&& where_pad
!= stack_direction
)
3198 anti_adjust_stack (GEN_INT (extra
));
3200 stack_pointer_delta
+= INTVAL (size
) - used
;
3201 move_by_pieces (gen_rtx_MEM (BLKmode
, gen_push_operand ()), xinner
,
3202 INTVAL (size
) - used
, align
);
3204 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3208 in_check_memory_usage
= 1;
3209 temp
= get_push_address (INTVAL (size
) - used
);
3210 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3211 emit_library_call (chkr_copy_bitmap_libfunc
,
3212 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, temp
,
3213 Pmode
, XEXP (xinner
, 0), Pmode
,
3214 GEN_INT (INTVAL (size
) - used
),
3215 TYPE_MODE (sizetype
));
3217 emit_library_call (chkr_set_right_libfunc
,
3218 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, temp
,
3219 Pmode
, GEN_INT (INTVAL (size
) - used
),
3220 TYPE_MODE (sizetype
),
3221 GEN_INT (MEMORY_USE_RW
),
3222 TYPE_MODE (integer_type_node
));
3223 in_check_memory_usage
= 0;
3227 #endif /* PUSH_ROUNDING */
3231 /* Otherwise make space on the stack and copy the data
3232 to the address of that space. */
3234 /* Deduct words put into registers from the size we must copy. */
3237 if (GET_CODE (size
) == CONST_INT
)
3238 size
= GEN_INT (INTVAL (size
) - used
);
3240 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
3241 GEN_INT (used
), NULL_RTX
, 0,
3245 /* Get the address of the stack space.
3246 In this case, we do not deal with EXTRA separately.
3247 A single stack adjust will do. */
3250 temp
= push_block (size
, extra
, where_pad
== downward
);
3253 else if (GET_CODE (args_so_far
) == CONST_INT
)
3254 temp
= memory_address (BLKmode
,
3255 plus_constant (args_addr
,
3256 skip
+ INTVAL (args_so_far
)));
3258 temp
= memory_address (BLKmode
,
3259 plus_constant (gen_rtx_PLUS (Pmode
,
3263 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3265 in_check_memory_usage
= 1;
3266 target
= copy_to_reg (temp
);
3267 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3268 emit_library_call (chkr_copy_bitmap_libfunc
,
3269 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
3271 XEXP (xinner
, 0), Pmode
,
3272 size
, TYPE_MODE (sizetype
));
3274 emit_library_call (chkr_set_right_libfunc
,
3275 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
3277 size
, TYPE_MODE (sizetype
),
3278 GEN_INT (MEMORY_USE_RW
),
3279 TYPE_MODE (integer_type_node
));
3280 in_check_memory_usage
= 0;
3283 target
= gen_rtx_MEM (BLKmode
, temp
);
3287 set_mem_attributes (target
, type
, 1);
3288 /* Function incoming arguments may overlap with sibling call
3289 outgoing arguments and we cannot allow reordering of reads
3290 from function arguments with stores to outgoing arguments
3291 of sibling calls. */
3292 MEM_ALIAS_SET (target
) = 0;
3295 /* TEMP is the address of the block. Copy the data there. */
3296 if (GET_CODE (size
) == CONST_INT
3297 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size
), align
))
3299 move_by_pieces (target
, xinner
, INTVAL (size
), align
);
3304 rtx opalign
= GEN_INT (align
/ BITS_PER_UNIT
);
3305 enum machine_mode mode
;
3307 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
3309 mode
= GET_MODE_WIDER_MODE (mode
))
3311 enum insn_code code
= movstr_optab
[(int) mode
];
3312 insn_operand_predicate_fn pred
;
3314 if (code
!= CODE_FOR_nothing
3315 && ((GET_CODE (size
) == CONST_INT
3316 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
3317 <= (GET_MODE_MASK (mode
) >> 1)))
3318 || GET_MODE_BITSIZE (mode
) >= BITS_PER_WORD
)
3319 && (!(pred
= insn_data
[(int) code
].operand
[0].predicate
)
3320 || ((*pred
) (target
, BLKmode
)))
3321 && (!(pred
= insn_data
[(int) code
].operand
[1].predicate
)
3322 || ((*pred
) (xinner
, BLKmode
)))
3323 && (!(pred
= insn_data
[(int) code
].operand
[3].predicate
)
3324 || ((*pred
) (opalign
, VOIDmode
))))
3326 rtx op2
= convert_to_mode (mode
, size
, 1);
3327 rtx last
= get_last_insn ();
3330 pred
= insn_data
[(int) code
].operand
[2].predicate
;
3331 if (pred
!= 0 && ! (*pred
) (op2
, mode
))
3332 op2
= copy_to_mode_reg (mode
, op2
);
3334 pat
= GEN_FCN ((int) code
) (target
, xinner
,
3342 delete_insns_since (last
);
3347 if (!ACCUMULATE_OUTGOING_ARGS
)
3349 /* If the source is referenced relative to the stack pointer,
3350 copy it to another register to stabilize it. We do not need
3351 to do this if we know that we won't be changing sp. */
3353 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
3354 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
3355 temp
= copy_to_reg (temp
);
3358 /* Make inhibit_defer_pop nonzero around the library call
3359 to force it to pop the bcopy-arguments right away. */
3361 #ifdef TARGET_MEM_FUNCTIONS
3362 emit_library_call (memcpy_libfunc
, LCT_NORMAL
,
3363 VOIDmode
, 3, temp
, Pmode
, XEXP (xinner
, 0), Pmode
,
3364 convert_to_mode (TYPE_MODE (sizetype
),
3365 size
, TREE_UNSIGNED (sizetype
)),
3366 TYPE_MODE (sizetype
));
3368 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3369 VOIDmode
, 3, XEXP (xinner
, 0), Pmode
, temp
, Pmode
,
3370 convert_to_mode (TYPE_MODE (integer_type_node
),
3372 TREE_UNSIGNED (integer_type_node
)),
3373 TYPE_MODE (integer_type_node
));
3378 else if (partial
> 0)
3380 /* Scalar partly in registers. */
3382 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
3385 /* # words of start of argument
3386 that we must make space for but need not store. */
3387 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_WORD
);
3388 int args_offset
= INTVAL (args_so_far
);
3391 /* Push padding now if padding above and stack grows down,
3392 or if padding below and stack grows up.
3393 But if space already allocated, this has already been done. */
3394 if (extra
&& args_addr
== 0
3395 && where_pad
!= none
&& where_pad
!= stack_direction
)
3396 anti_adjust_stack (GEN_INT (extra
));
3398 /* If we make space by pushing it, we might as well push
3399 the real data. Otherwise, we can leave OFFSET nonzero
3400 and leave the space uninitialized. */
3404 /* Now NOT_STACK gets the number of words that we don't need to
3405 allocate on the stack. */
3406 not_stack
= partial
- offset
;
3408 /* If the partial register-part of the arg counts in its stack size,
3409 skip the part of stack space corresponding to the registers.
3410 Otherwise, start copying to the beginning of the stack space,
3411 by setting SKIP to 0. */
3412 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
3414 if (CONSTANT_P (x
) && ! LEGITIMATE_CONSTANT_P (x
))
3415 x
= validize_mem (force_const_mem (mode
, x
));
3417 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3418 SUBREGs of such registers are not allowed. */
3419 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3420 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
3421 x
= copy_to_reg (x
);
3423 /* Loop over all the words allocated on the stack for this arg. */
3424 /* We can do it by words, because any scalar bigger than a word
3425 has a size a multiple of a word. */
3426 #ifndef PUSH_ARGS_REVERSED
3427 for (i
= not_stack
; i
< size
; i
++)
3429 for (i
= size
- 1; i
>= not_stack
; i
--)
3431 if (i
>= not_stack
+ offset
)
3432 emit_push_insn (operand_subword_force (x
, i
, mode
),
3433 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
3435 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
3437 reg_parm_stack_space
, alignment_pad
);
3442 rtx target
= NULL_RTX
;
3445 /* Push padding now if padding above and stack grows down,
3446 or if padding below and stack grows up.
3447 But if space already allocated, this has already been done. */
3448 if (extra
&& args_addr
== 0
3449 && where_pad
!= none
&& where_pad
!= stack_direction
)
3450 anti_adjust_stack (GEN_INT (extra
));
3452 #ifdef PUSH_ROUNDING
3453 if (args_addr
== 0 && PUSH_ARGS
)
3455 addr
= gen_push_operand ();
3456 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3461 if (GET_CODE (args_so_far
) == CONST_INT
)
3463 = memory_address (mode
,
3464 plus_constant (args_addr
,
3465 INTVAL (args_so_far
)));
3467 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
3472 dest
= gen_rtx_MEM (mode
, addr
);
3475 set_mem_attributes (dest
, type
, 1);
3476 /* Function incoming arguments may overlap with sibling call
3477 outgoing arguments and we cannot allow reordering of reads
3478 from function arguments with stores to outgoing arguments
3479 of sibling calls. */
3480 MEM_ALIAS_SET (dest
) = 0;
3483 emit_move_insn (dest
, x
);
3485 if (current_function_check_memory_usage
&& ! in_check_memory_usage
)
3487 in_check_memory_usage
= 1;
3489 target
= get_push_address (GET_MODE_SIZE (mode
));
3491 if (GET_CODE (x
) == MEM
&& type
&& AGGREGATE_TYPE_P (type
))
3492 emit_library_call (chkr_copy_bitmap_libfunc
,
3493 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, target
,
3494 Pmode
, XEXP (x
, 0), Pmode
,
3495 GEN_INT (GET_MODE_SIZE (mode
)),
3496 TYPE_MODE (sizetype
));
3498 emit_library_call (chkr_set_right_libfunc
,
3499 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, target
,
3500 Pmode
, GEN_INT (GET_MODE_SIZE (mode
)),
3501 TYPE_MODE (sizetype
),
3502 GEN_INT (MEMORY_USE_RW
),
3503 TYPE_MODE (integer_type_node
));
3504 in_check_memory_usage
= 0;
3509 /* If part should go in registers, copy that part
3510 into the appropriate registers. Do this now, at the end,
3511 since mem-to-mem copies above may do function calls. */
3512 if (partial
> 0 && reg
!= 0)
3514 /* Handle calls that pass values in multiple non-contiguous locations.
3515 The Irix 6 ABI has examples of this. */
3516 if (GET_CODE (reg
) == PARALLEL
)
3517 emit_group_load (reg
, x
, -1, align
); /* ??? size? */
3519 move_block_to_reg (REGNO (reg
), x
, partial
, mode
);
3522 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
3523 anti_adjust_stack (GEN_INT (extra
));
3525 if (alignment_pad
&& args_addr
== 0)
3526 anti_adjust_stack (alignment_pad
);
3529 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3537 /* Only registers can be subtargets. */
3538 || GET_CODE (x
) != REG
3539 /* If the register is readonly, it can't be set more than once. */
3540 || RTX_UNCHANGING_P (x
)
3541 /* Don't use hard regs to avoid extending their life. */
3542 || REGNO (x
) < FIRST_PSEUDO_REGISTER
3543 /* Avoid subtargets inside loops,
3544 since they hide some invariant expressions. */
3545 || preserve_subexpressions_p ())
3549 /* Expand an assignment that stores the value of FROM into TO.
3550 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3551 (This may contain a QUEUED rtx;
3552 if the value is constant, this rtx is a constant.)
3553 Otherwise, the returned value is NULL_RTX.
3555 SUGGEST_REG is no longer actually used.
3556 It used to mean, copy the value through a register
3557 and return that register, if that is possible.
3558 We now use WANT_VALUE to decide whether to do this. */
3561 expand_assignment (to
, from
, want_value
, suggest_reg
)
3564 int suggest_reg ATTRIBUTE_UNUSED
;
3566 register rtx to_rtx
= 0;
3569 /* Don't crash if the lhs of the assignment was erroneous. */
3571 if (TREE_CODE (to
) == ERROR_MARK
)
3573 result
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3574 return want_value
? result
: NULL_RTX
;
3577 /* Assignment of a structure component needs special treatment
3578 if the structure component's rtx is not simply a MEM.
3579 Assignment of an array element at a constant index, and assignment of
3580 an array element in an unaligned packed structure field, have the same problem.
3583 if (TREE_CODE (to
) == COMPONENT_REF
|| TREE_CODE (to
) == BIT_FIELD_REF
3584 || TREE_CODE (to
) == ARRAY_REF
)
3586 enum machine_mode mode1
;
3587 HOST_WIDE_INT bitsize
, bitpos
;
3592 unsigned int alignment
;
3595 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
3596 &unsignedp
, &volatilep
, &alignment
);
3598 /* If we are going to use store_bit_field and extract_bit_field,
3599 make sure to_rtx will be safe for multiple use. */
3601 if (mode1
== VOIDmode
&& want_value
)
3602 tem
= stabilize_reference (tem
);
3604 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_DONT
);
3607 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
3609 if (GET_CODE (to_rtx
) != MEM
)
3612 if (GET_MODE (offset_rtx
) != ptr_mode
)
3614 #ifdef POINTERS_EXTEND_UNSIGNED
3615 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
3617 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
3621 /* A constant address in TO_RTX can have VOIDmode; we must not try
3622 to call force_reg for such an address. Avoid that case. */
3623 if (GET_CODE (to_rtx
) == MEM
3624 && GET_MODE (to_rtx
) == BLKmode
3625 && GET_MODE (XEXP (to_rtx
, 0)) != VOIDmode
3627 && (bitpos
% bitsize
) == 0
3628 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
3629 && alignment
== GET_MODE_ALIGNMENT (mode1
))
3631 rtx temp
= change_address (to_rtx
, mode1
,
3632 plus_constant (XEXP (to_rtx
, 0),
3635 if (GET_CODE (XEXP (temp
, 0)) == REG
)
3638 to_rtx
= change_address (to_rtx
, mode1
,
3639 force_reg (GET_MODE (XEXP (temp
, 0)),
3644 to_rtx
= change_address (to_rtx
, VOIDmode
,
3645 gen_rtx_PLUS (ptr_mode
, XEXP (to_rtx
, 0),
3646 force_reg (ptr_mode
,
3652 if (GET_CODE (to_rtx
) == MEM
)
3654 /* When the offset is zero, to_rtx is the address of the
3655 structure we are storing into, and hence may be shared.
3656 We must make a new MEM before setting the volatile bit. */
3658 to_rtx
= copy_rtx (to_rtx
);
3660 MEM_VOLATILE_P (to_rtx
) = 1;
3662 #if 0 /* This was turned off because, when a field is volatile
3663 in an object which is not volatile, the object may be in a register,
3664 and then we would abort over here. */
3670 if (TREE_CODE (to
) == COMPONENT_REF
3671 && TREE_READONLY (TREE_OPERAND (to
, 1)))
3674 to_rtx
= copy_rtx (to_rtx
);
3676 RTX_UNCHANGING_P (to_rtx
) = 1;
3679 /* Check the access. */
3680 if (current_function_check_memory_usage
&& GET_CODE (to_rtx
) == MEM
)
3685 enum machine_mode best_mode
;
3687 best_mode
= get_best_mode (bitsize
, bitpos
,
3688 TYPE_ALIGN (TREE_TYPE (tem
)),
3690 if (best_mode
== VOIDmode
)
3693 best_mode_size
= GET_MODE_BITSIZE (best_mode
);
3694 to_addr
= plus_constant (XEXP (to_rtx
, 0), (bitpos
/ BITS_PER_UNIT
));
3695 size
= CEIL ((bitpos
% best_mode_size
) + bitsize
, best_mode_size
);
3696 size
*= GET_MODE_SIZE (best_mode
);
3698 /* Check the access right of the pointer. */
3699 in_check_memory_usage
= 1;
3701 emit_library_call (chkr_check_addr_libfunc
, LCT_CONST_MAKE_BLOCK
,
3702 VOIDmode
, 3, to_addr
, Pmode
,
3703 GEN_INT (size
), TYPE_MODE (sizetype
),
3704 GEN_INT (MEMORY_USE_WO
),
3705 TYPE_MODE (integer_type_node
));
3706 in_check_memory_usage
= 0;
3709 /* If this is a varying-length object, we must get the address of
3710 the source and do an explicit block move. */
3713 unsigned int from_align
;
3714 rtx from_rtx
= expand_expr_unaligned (from
, &from_align
);
3716 = change_address (to_rtx
, VOIDmode
,
3717 plus_constant (XEXP (to_rtx
, 0),
3718 bitpos
/ BITS_PER_UNIT
));
3720 emit_block_move (inner_to_rtx
, from_rtx
, expr_size (from
),
3721 MIN (alignment
, from_align
));
3728 result
= store_field (to_rtx
, bitsize
, bitpos
, mode1
, from
,
3730 /* Spurious cast for HPUX compiler. */
3731 ? ((enum machine_mode
)
3732 TYPE_MODE (TREE_TYPE (to
)))
3736 int_size_in_bytes (TREE_TYPE (tem
)),
3737 get_alias_set (to
));
3739 preserve_temp_slots (result
);
3743 /* If the value is meaningful, convert RESULT to the proper mode.
3744 Otherwise, return nothing. */
3745 return (want_value
? convert_modes (TYPE_MODE (TREE_TYPE (to
)),
3746 TYPE_MODE (TREE_TYPE (from
)),
3748 TREE_UNSIGNED (TREE_TYPE (to
)))
3753 /* If the rhs is a function call and its value is not an aggregate,
3754 call the function before we start to compute the lhs.
3755 This is needed for correct code for cases such as
3756 val = setjmp (buf) on machines where reference to val
3757 requires loading up part of an address in a separate insn.
3759 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3760 since it might be a promoted variable where the zero- or sign- extension
3761 needs to be done. Handling this in the normal way is safe because no
3762 computation is done before the call. */
3763 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
)
3764 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
3765 && ! ((TREE_CODE (to
) == VAR_DECL
|| TREE_CODE (to
) == PARM_DECL
)
3766 && GET_CODE (DECL_RTL (to
)) == REG
))
3771 value
= expand_expr (from
, NULL_RTX
, VOIDmode
, 0);
3773 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3775 /* Handle calls that return values in multiple non-contiguous locations.
3776 The Irix 6 ABI has examples of this. */
3777 if (GET_CODE (to_rtx
) == PARALLEL
)
3778 emit_group_load (to_rtx
, value
, int_size_in_bytes (TREE_TYPE (from
)),
3779 TYPE_ALIGN (TREE_TYPE (from
)));
3780 else if (GET_MODE (to_rtx
) == BLKmode
)
3781 emit_block_move (to_rtx
, value
, expr_size (from
),
3782 TYPE_ALIGN (TREE_TYPE (from
)));
3785 #ifdef POINTERS_EXTEND_UNSIGNED
3786 if (TREE_CODE (TREE_TYPE (to
)) == REFERENCE_TYPE
3787 || TREE_CODE (TREE_TYPE (to
)) == POINTER_TYPE
)
3788 value
= convert_memory_address (GET_MODE (to_rtx
), value
);
3790 emit_move_insn (to_rtx
, value
);
3792 preserve_temp_slots (to_rtx
);
3795 return want_value
? to_rtx
: NULL_RTX
;
3798 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3799 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3803 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_WO
);
3804 if (GET_CODE (to_rtx
) == MEM
)
3805 MEM_ALIAS_SET (to_rtx
) = get_alias_set (to
);
3808 /* Don't move directly into a return register. */
3809 if (TREE_CODE (to
) == RESULT_DECL
3810 && (GET_CODE (to_rtx
) == REG
|| GET_CODE (to_rtx
) == PARALLEL
))
3815 temp
= expand_expr (from
, 0, GET_MODE (to_rtx
), 0);
3817 if (GET_CODE (to_rtx
) == PARALLEL
)
3818 emit_group_load (to_rtx
, temp
, int_size_in_bytes (TREE_TYPE (from
)),
3819 TYPE_ALIGN (TREE_TYPE (from
)));
3821 emit_move_insn (to_rtx
, temp
);
3823 preserve_temp_slots (to_rtx
);
3826 return want_value
? to_rtx
: NULL_RTX
;
3829 /* In case we are returning the contents of an object which overlaps
3830 the place the value is being stored, use a safe function when copying
3831 a value through a pointer into a structure value return block. */
3832 if (TREE_CODE (to
) == RESULT_DECL
&& TREE_CODE (from
) == INDIRECT_REF
3833 && current_function_returns_struct
3834 && !current_function_returns_pcc_struct
)
3839 size
= expr_size (from
);
3840 from_rtx
= expand_expr (from
, NULL_RTX
, VOIDmode
,
3841 EXPAND_MEMORY_USE_DONT
);
3843 /* Copy the rights of the bitmap. */
3844 if (current_function_check_memory_usage
)
3845 emit_library_call (chkr_copy_bitmap_libfunc
, LCT_CONST_MAKE_BLOCK
,
3846 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3847 XEXP (from_rtx
, 0), Pmode
,
3848 convert_to_mode (TYPE_MODE (sizetype
),
3849 size
, TREE_UNSIGNED (sizetype
)),
3850 TYPE_MODE (sizetype
));
3852 #ifdef TARGET_MEM_FUNCTIONS
3853 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
3854 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
3855 XEXP (from_rtx
, 0), Pmode
,
3856 convert_to_mode (TYPE_MODE (sizetype
),
3857 size
, TREE_UNSIGNED (sizetype
)),
3858 TYPE_MODE (sizetype
));
3860 emit_library_call (bcopy_libfunc
, LCT_NORMAL
,
3861 VOIDmode
, 3, XEXP (from_rtx
, 0), Pmode
,
3862 XEXP (to_rtx
, 0), Pmode
,
3863 convert_to_mode (TYPE_MODE (integer_type_node
),
3864 size
, TREE_UNSIGNED (integer_type_node
)),
3865 TYPE_MODE (integer_type_node
));
3868 preserve_temp_slots (to_rtx
);
3871 return want_value
? to_rtx
: NULL_RTX
;
3874 /* Compute FROM and store the value in the rtx we got. */
3877 result
= store_expr (from
, to_rtx
, want_value
);
3878 preserve_temp_slots (result
);
3881 return want_value
? result
: NULL_RTX
;
3884 /* Generate code for computing expression EXP,
3885 and storing the value into TARGET.
3886 TARGET may contain a QUEUED rtx.
3888 If WANT_VALUE is nonzero, return a copy of the value
3889 not in TARGET, so that we can be sure to use the proper
3890 value in a containing expression even if TARGET has something
3891 else stored in it. If possible, we copy the value through a pseudo
3892 and return that pseudo. Or, if the value is constant, we try to
3893 return the constant. In some cases, we return a pseudo
3894 copied *from* TARGET.
3896 If the mode is BLKmode then we may return TARGET itself.
3897 It turns out that in BLKmode it doesn't cause a problem.
3898 because C has no operators that could combine two different
3899 assignments into the same BLKmode object with different values
3900 with no sequence point. Will other languages need this to be fixed?
3903 If WANT_VALUE is 0, we return NULL, to make sure
3904 to catch quickly any cases where the caller uses the value
3905 and fails to set WANT_VALUE. */
3908 store_expr (exp
, target
, want_value
)
3910 register rtx target
;
3914 int dont_return_target
= 0;
3916 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
3918 /* Perform first part of compound expression, then assign from second
3920 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
3922 return store_expr (TREE_OPERAND (exp
, 1), target
, want_value
);
3924 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
3926 /* For conditional expression, get safe form of the target. Then
3927 test the condition, doing the appropriate assignment on either
3928 side. This avoids the creation of unnecessary temporaries.
3929 For non-BLKmode, it is more efficient not to do this. */
3931 rtx lab1
= gen_label_rtx (), lab2
= gen_label_rtx ();
3934 target
= protect_from_queue (target
, 1);
3936 do_pending_stack_adjust ();
3938 jumpifnot (TREE_OPERAND (exp
, 0), lab1
);
3939 start_cleanup_deferral ();
3940 store_expr (TREE_OPERAND (exp
, 1), target
, 0);
3941 end_cleanup_deferral ();
3943 emit_jump_insn (gen_jump (lab2
));
3946 start_cleanup_deferral ();
3947 store_expr (TREE_OPERAND (exp
, 2), target
, 0);
3948 end_cleanup_deferral ();
3953 return want_value
? target
: NULL_RTX
;
3955 else if (queued_subexp_p (target
))
3956 /* If target contains a postincrement, let's not risk
3957 using it as the place to generate the rhs. */
3959 if (GET_MODE (target
) != BLKmode
&& GET_MODE (target
) != VOIDmode
)
3961 /* Expand EXP into a new pseudo. */
3962 temp
= gen_reg_rtx (GET_MODE (target
));
3963 temp
= expand_expr (exp
, temp
, GET_MODE (target
), 0);
3966 temp
= expand_expr (exp
, NULL_RTX
, GET_MODE (target
), 0);
3968 /* If target is volatile, ANSI requires accessing the value
3969 *from* the target, if it is accessed. So make that happen.
3970 In no case return the target itself. */
3971 if (! MEM_VOLATILE_P (target
) && want_value
)
3972 dont_return_target
= 1;
3974 else if (want_value
&& GET_CODE (target
) == MEM
&& ! MEM_VOLATILE_P (target
)
3975 && GET_MODE (target
) != BLKmode
)
3976 /* If target is in memory and caller wants value in a register instead,
3977 arrange that. Pass TARGET as target for expand_expr so that,
3978 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3979 We know expand_expr will not use the target in that case.
3980 Don't do this if TARGET is volatile because we are supposed
3981 to write it and then read it. */
3983 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
3984 if (GET_MODE (temp
) != BLKmode
&& GET_MODE (temp
) != VOIDmode
)
3985 temp
= copy_to_reg (temp
);
3986 dont_return_target
= 1;
3988 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3989 /* If this is a scalar in a register that is stored in a wider mode
3990 than the declared mode, compute the result into its declared mode
3991 and then convert to the wider mode. Our value is the computed
3994 /* If we don't want a value, we can do the conversion inside EXP,
3995 which will often result in some optimizations. Do the conversion
3996 in two steps: first change the signedness, if needed, then
3997 the extend. But don't do this if the type of EXP is a subtype
3998 of something else since then the conversion might involve
3999 more than just converting modes. */
4000 if (! want_value
&& INTEGRAL_TYPE_P (TREE_TYPE (exp
))
4001 && TREE_TYPE (TREE_TYPE (exp
)) == 0)
4003 if (TREE_UNSIGNED (TREE_TYPE (exp
))
4004 != SUBREG_PROMOTED_UNSIGNED_P (target
))
4007 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target
),
4011 exp
= convert (type_for_mode (GET_MODE (SUBREG_REG (target
)),
4012 SUBREG_PROMOTED_UNSIGNED_P (target
)),
4016 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
4018 /* If TEMP is a volatile MEM and we want a result value, make
4019 the access now so it gets done only once. Likewise if
4020 it contains TARGET. */
4021 if (GET_CODE (temp
) == MEM
&& want_value
4022 && (MEM_VOLATILE_P (temp
)
4023 || reg_mentioned_p (SUBREG_REG (target
), XEXP (temp
, 0))))
4024 temp
= copy_to_reg (temp
);
4026 /* If TEMP is a VOIDmode constant, use convert_modes to make
4027 sure that we properly convert it. */
4028 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4029 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4030 TYPE_MODE (TREE_TYPE (exp
)), temp
,
4031 SUBREG_PROMOTED_UNSIGNED_P (target
));
4033 convert_move (SUBREG_REG (target
), temp
,
4034 SUBREG_PROMOTED_UNSIGNED_P (target
));
4036 /* If we promoted a constant, change the mode back down to match
4037 target. Otherwise, the caller might get confused by a result whose
4038 mode is larger than expected. */
4040 if (want_value
&& GET_MODE (temp
) != GET_MODE (target
)
4041 && GET_MODE (temp
) != VOIDmode
)
4043 temp
= gen_rtx_SUBREG (GET_MODE (target
), temp
, 0);
4044 SUBREG_PROMOTED_VAR_P (temp
) = 1;
4045 SUBREG_PROMOTED_UNSIGNED_P (temp
)
4046 = SUBREG_PROMOTED_UNSIGNED_P (target
);
4049 return want_value
? temp
: NULL_RTX
;
4053 temp
= expand_expr (exp
, target
, GET_MODE (target
), 0);
4054 /* Return TARGET if it's a specified hardware register.
4055 If TARGET is a volatile mem ref, either return TARGET
4056 or return a reg copied *from* TARGET; ANSI requires this.
4058 Otherwise, if TEMP is not TARGET, return TEMP
4059 if it is constant (for efficiency),
4060 or if we really want the correct value. */
4061 if (!(target
&& GET_CODE (target
) == REG
4062 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4063 && !(GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
))
4064 && ! rtx_equal_p (temp
, target
)
4065 && (CONSTANT_P (temp
) || want_value
))
4066 dont_return_target
= 1;
4069 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4070 the same as that of TARGET, adjust the constant. This is needed, for
4071 example, in case it is a CONST_DOUBLE and we want only a word-sized
4073 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
4074 && TREE_CODE (exp
) != ERROR_MARK
4075 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
4076 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
4077 temp
, TREE_UNSIGNED (TREE_TYPE (exp
)));
4079 if (current_function_check_memory_usage
4080 && GET_CODE (target
) == MEM
4081 && AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
4083 in_check_memory_usage
= 1;
4084 if (GET_CODE (temp
) == MEM
)
4085 emit_library_call (chkr_copy_bitmap_libfunc
, LCT_CONST_MAKE_BLOCK
,
4086 VOIDmode
, 3, XEXP (target
, 0), Pmode
,
4087 XEXP (temp
, 0), Pmode
,
4088 expr_size (exp
), TYPE_MODE (sizetype
));
4090 emit_library_call (chkr_check_addr_libfunc
, LCT_CONST_MAKE_BLOCK
,
4091 VOIDmode
, 3, XEXP (target
, 0), Pmode
,
4092 expr_size (exp
), TYPE_MODE (sizetype
),
4093 GEN_INT (MEMORY_USE_WO
),
4094 TYPE_MODE (integer_type_node
));
4095 in_check_memory_usage
= 0;
4098 /* If value was not generated in the target, store it there.
4099 Convert the value to TARGET's type first if necessary. */
4100 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4101 one or both of them are volatile memory refs, we have to distinguish
4103 - expand_expr has used TARGET. In this case, we must not generate
4104 another copy. This can be detected by TARGET being equal according to ==.
4106 - expand_expr has not used TARGET - that means that the source just
4107 happens to have the same RTX form. Since temp will have been created
4108 by expand_expr, it will compare unequal according to == .
4109 We must generate a copy in this case, to reach the correct number
4110 of volatile memory references. */
4112 if ((! rtx_equal_p (temp
, target
)
4113 || (temp
!= target
&& (side_effects_p (temp
)
4114 || side_effects_p (target
))))
4115 && TREE_CODE (exp
) != ERROR_MARK
)
    {
      target = protect_from_queue (target, 1);

      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }
4134 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
4136 /* Handle copying a string constant into an array.
4137 The string constant may be shorter than the array.
4138 So copy just the string's actual length, and clear the rest. */
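          /* For example, given a C initializer such as

                 char buf[8] = "abc";

             the STRING_CST supplies only 4 bytes (counting the terminating
             NUL); those 4 bytes are block-copied and the remaining 4 bytes
             of the array are cleared.  (Illustrative example only.)  */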
4142 /* Get the size of the data type of the string,
4143 which is actually the size of the target. */
4144 size
= expr_size (exp
);
4145 if (GET_CODE (size
) == CONST_INT
4146 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
4147 emit_block_move (target
, temp
, size
, TYPE_ALIGN (TREE_TYPE (exp
)));
4150 /* Compute the size of the data to copy from the string. */
4152 = size_binop (MIN_EXPR
,
4153 make_tree (sizetype
, size
),
4154 size_int (TREE_STRING_LENGTH (exp
)));
4155 unsigned int align
= TYPE_ALIGN (TREE_TYPE (exp
));
4156 rtx copy_size_rtx
= expand_expr (copy_size
, NULL_RTX
,
4160 /* Copy that much. */
4161 emit_block_move (target
, temp
, copy_size_rtx
,
4162 TYPE_ALIGN (TREE_TYPE (exp
)));
4164 /* Figure out how much is left in TARGET that we have to clear.
4165 Do all calculations in ptr_mode. */
4167 addr
= XEXP (target
, 0);
4168 addr
= convert_modes (ptr_mode
, Pmode
, addr
, 1);
4170 if (GET_CODE (copy_size_rtx
) == CONST_INT
)
4172 addr
= plus_constant (addr
, TREE_STRING_LENGTH (exp
));
4173 size
= plus_constant (size
, -TREE_STRING_LENGTH (exp
));
4175 (unsigned int) (BITS_PER_UNIT
4176 * (INTVAL (copy_size_rtx
)
4177 & - INTVAL (copy_size_rtx
))));
4181 addr
= force_reg (ptr_mode
, addr
);
4182 addr
= expand_binop (ptr_mode
, add_optab
, addr
,
4183 copy_size_rtx
, NULL_RTX
, 0,
4186 size
= expand_binop (ptr_mode
, sub_optab
, size
,
4187 copy_size_rtx
, NULL_RTX
, 0,
4190 align
= BITS_PER_UNIT
;
4191 label
= gen_label_rtx ();
4192 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
4193 GET_MODE (size
), 0, 0, label
);
4195 align
= MIN (align
, expr_align (copy_size
));
4197 if (size
!= const0_rtx
)
4199 rtx dest
= gen_rtx_MEM (BLKmode
, addr
);
4201 MEM_COPY_ATTRIBUTES (dest
, target
);
4203 /* Be sure we can write on ADDR. */
4204 in_check_memory_usage
= 1;
4205 if (current_function_check_memory_usage
)
4206 emit_library_call (chkr_check_addr_libfunc
,
4207 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
4209 size
, TYPE_MODE (sizetype
),
4210 GEN_INT (MEMORY_USE_WO
),
4211 TYPE_MODE (integer_type_node
));
4212 in_check_memory_usage
= 0;
4213 clear_storage (dest
, size
, align
);
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
                         TYPE_ALIGN (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         TYPE_ALIGN (TREE_TYPE (exp)));
      else
        emit_move_insn (target, temp);
  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
           && ! (GET_CODE (target) == REG
                 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
4251 /* Return 1 if EXP just contains zeros. */
4259 switch (TREE_CODE (exp
))
4263 case NON_LVALUE_EXPR
:
4264 return is_zeros_p (TREE_OPERAND (exp
, 0));
4267 return integer_zerop (exp
);
4271 is_zeros_p (TREE_REALPART (exp
)) && is_zeros_p (TREE_IMAGPART (exp
));
4274 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp
), dconst0
);
4277 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4278 return CONSTRUCTOR_ELTS (exp
) == NULL_TREE
;
4279 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4280 if (! is_zeros_p (TREE_VALUE (elt
)))
4290 /* Return 1 if EXP contains mostly (3/4) zeros. */
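/* For example, a CONSTRUCTOR for the C initializer { 0, 0, 0, 5 } has
   three zero elements out of four, so the test 4 * zeros >= 3 * elts
   (i.e. at least 75% zeros) succeeds and callers may prefer to clear
   the whole object first.  (Illustrative example only.)  */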
4293 mostly_zeros_p (exp
)
4296 if (TREE_CODE (exp
) == CONSTRUCTOR
)
4298 int elts
= 0, zeros
= 0;
4299 tree elt
= CONSTRUCTOR_ELTS (exp
);
4300 if (TREE_TYPE (exp
) && TREE_CODE (TREE_TYPE (exp
)) == SET_TYPE
)
4302 /* If there are no ranges of true bits, it is all zero. */
4303 return elt
== NULL_TREE
;
4305 for (; elt
; elt
= TREE_CHAIN (elt
))
4307 /* We do not handle the case where the index is a RANGE_EXPR,
4308 so the statistic will be somewhat inaccurate.
4309 We do make a more accurate count in store_constructor itself,
4310 so since this function is only used for nested array elements,
4311 this should be close enough. */
4312 if (mostly_zeros_p (TREE_VALUE (elt
)))
4317 return 4 * zeros
>= 3 * elts
;
4320 return is_zeros_p (exp
);
4323 /* Helper function for store_constructor.
4324 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4325 TYPE is the type of the CONSTRUCTOR, not the element type.
4326 ALIGN and CLEARED are as for store_constructor.
4327 ALIAS_SET is the alias set to use for any stores.
4329 This provides a recursive shortcut back to store_constructor when it isn't
4330 necessary to go through store_field. This is so that we can pass through
4331 the cleared field to let store_constructor know that we may not have to
4332 clear a substructure if the outer structure has already been cleared. */
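/* For example, when expanding an initializer such as

       struct inner { int a, b; };
       struct outer { struct inner i; int c; } x = { { 1 }, 2 };

   the CONSTRUCTOR for the nested `struct inner' is handed straight back
   to store_constructor (with CLEARED still set if all of `x' was already
   zeroed), instead of going through store_field as an opaque value.
   (Illustrative example only.)  */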
4335 store_constructor_field (target
, bitsize
, bitpos
,
4336 mode
, exp
, type
, align
, cleared
, alias_set
)
4338 unsigned HOST_WIDE_INT bitsize
;
4339 HOST_WIDE_INT bitpos
;
4340 enum machine_mode mode
;
4346 if (TREE_CODE (exp
) == CONSTRUCTOR
4347 && bitpos
% BITS_PER_UNIT
== 0
4348 /* If we have a non-zero bitpos for a register target, then we just
4349 let store_field do the bitfield handling. This is unlikely to
4350 generate unnecessary clear instructions anyways. */
4351 && (bitpos
== 0 || GET_CODE (target
) == MEM
))
4355 = change_address (target
,
4356 GET_MODE (target
) == BLKmode
4358 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
4359 ? BLKmode
: VOIDmode
,
4360 plus_constant (XEXP (target
, 0),
4361 bitpos
/ BITS_PER_UNIT
));
4364 /* Show the alignment may no longer be what it was and update the alias
4365 set, if required. */
4367 align
= MIN (align
, (unsigned int) bitpos
& - bitpos
);
4368 if (GET_CODE (target
) == MEM
)
4369 MEM_ALIAS_SET (target
) = alias_set
;
4371 store_constructor (exp
, target
, align
, cleared
, bitsize
/ BITS_PER_UNIT
);
4374 store_field (target
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0, align
,
4375 int_size_in_bytes (type
), alias_set
);
4378 /* Store the value of constructor EXP into the rtx TARGET.
4379 TARGET is either a REG or a MEM.
4380 ALIGN is the maximum known alignment for TARGET.
4381 CLEARED is true if TARGET is known to have been zero'd.
4382 SIZE is the number of bytes of TARGET we are allowed to modify: this
4383 may not be the same as the size of EXP if we are assigning to a field
4384 which has been packed to exclude padding bits. */
4387 store_constructor (exp
, target
, align
, cleared
, size
)
4394 tree type
= TREE_TYPE (exp
);
4395 #ifdef WORD_REGISTER_OPERATIONS
4396 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
4399 /* We know our target cannot conflict, since safe_from_p has been called. */
4401 /* Don't try copying piece by piece into a hard register
4402 since that is vulnerable to being clobbered by EXP.
4403 Instead, construct in a pseudo register and then copy it all. */
4404 if (GET_CODE (target
) == REG
&& REGNO (target
) < FIRST_PSEUDO_REGISTER
)
4406 rtx temp
= gen_reg_rtx (GET_MODE (target
));
4407 store_constructor (exp
, temp
, align
, cleared
, size
);
4408 emit_move_insn (target
, temp
);
4413 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
4414 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4418 /* Inform later passes that the whole union value is dead. */
4419 if ((TREE_CODE (type
) == UNION_TYPE
4420 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
4423 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4425 /* If the constructor is empty, clear the union. */
4426 if (! CONSTRUCTOR_ELTS (exp
) && ! cleared
)
4427 clear_storage (target
, expr_size (exp
), TYPE_ALIGN (type
));
4430 /* If we are building a static constructor into a register,
4431 set the initial value as zero so we can fold the value into
4432 a constant. But if more than one register is involved,
4433 this probably loses. */
4434 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
4435 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
4438 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
      /* If the constructor has fewer fields than the structure
         or if we are initializing the structure to mostly zeros,
         clear the whole structure first.  Don't do this if TARGET is a
         register whose mode size isn't equal to SIZE since clear_storage
         can't handle this case.  */
4449 && ((list_length (CONSTRUCTOR_ELTS (exp
))
4450 != fields_length (type
))
4451 || mostly_zeros_p (exp
))
4452 && (GET_CODE (target
) != REG
4453 || (HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
)) == size
))
4456 clear_storage (target
, GEN_INT (size
), align
);
4461 /* Inform later passes that the old value is dead. */
4462 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4464 /* Store each element of the constructor into
4465 the corresponding field of TARGET. */
4467 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4469 register tree field
= TREE_PURPOSE (elt
);
4470 #ifdef WORD_REGISTER_OPERATIONS
4471 tree value
= TREE_VALUE (elt
);
4473 register enum machine_mode mode
;
4474 HOST_WIDE_INT bitsize
;
4475 HOST_WIDE_INT bitpos
= 0;
4478 rtx to_rtx
= target
;
4480 /* Just ignore missing fields.
4481 We cleared the whole structure, above,
4482 if any fields are missing. */
4486 if (cleared
&& is_zeros_p (TREE_VALUE (elt
)))
4489 if (host_integerp (DECL_SIZE (field
), 1))
4490 bitsize
= tree_low_cst (DECL_SIZE (field
), 1);
4494 unsignedp
= TREE_UNSIGNED (field
);
4495 mode
= DECL_MODE (field
);
4496 if (DECL_BIT_FIELD (field
))
4499 offset
= DECL_FIELD_OFFSET (field
);
4500 if (host_integerp (offset
, 0)
4501 && host_integerp (bit_position (field
), 0))
4503 bitpos
= int_bit_position (field
);
4507 bitpos
= tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 0);
4513 if (contains_placeholder_p (offset
))
4514 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4515 offset
, make_tree (TREE_TYPE (exp
), target
));
4517 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4518 if (GET_CODE (to_rtx
) != MEM
)
4521 if (GET_MODE (offset_rtx
) != ptr_mode
)
4523 #ifdef POINTERS_EXTEND_UNSIGNED
4524 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
4526 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4531 = change_address (to_rtx
, VOIDmode
,
4532 gen_rtx_PLUS (ptr_mode
, XEXP (to_rtx
, 0),
4533 force_reg (ptr_mode
,
4535 align
= DECL_OFFSET_ALIGN (field
);
4538 if (TREE_READONLY (field
))
4540 if (GET_CODE (to_rtx
) == MEM
)
4541 to_rtx
= copy_rtx (to_rtx
);
4543 RTX_UNCHANGING_P (to_rtx
) = 1;
4546 #ifdef WORD_REGISTER_OPERATIONS
4547 /* If this initializes a field that is smaller than a word, at the
4548 start of a word, try to widen it to a full word.
4549 This special case allows us to output C++ member function
4550 initializations in a form that the optimizers can understand. */
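          /* For example, on a 64-bit WORD_REGISTER_OPERATIONS target,
             storing a 32-bit INTEGER_CST into the first field of a
             register-allocated structure is widened here to a full-word
             store (shifted up on a big-endian machine), so later passes
             see a plain word assignment rather than a bit-field
             insertion.  (Illustrative example only.)  */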
4551 if (GET_CODE (target
) == REG
4552 && bitsize
< BITS_PER_WORD
4553 && bitpos
% BITS_PER_WORD
== 0
4554 && GET_MODE_CLASS (mode
) == MODE_INT
4555 && TREE_CODE (value
) == INTEGER_CST
4557 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
4559 tree type
= TREE_TYPE (value
);
4560 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4562 type
= type_for_size (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4563 value
= convert (type
, value
);
4565 if (BYTES_BIG_ENDIAN
)
4567 = fold (build (LSHIFT_EXPR
, type
, value
,
4568 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4569 bitsize
= BITS_PER_WORD
;
4573 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
4574 TREE_VALUE (elt
), type
, align
, cleared
,
4575 (DECL_NONADDRESSABLE_P (field
)
4576 && GET_CODE (to_rtx
) == MEM
)
4577 ? MEM_ALIAS_SET (to_rtx
)
4578 : get_alias_set (TREE_TYPE (field
)));
4581 else if (TREE_CODE (type
) == ARRAY_TYPE
)
4586 tree domain
= TYPE_DOMAIN (type
);
4587 tree elttype
= TREE_TYPE (type
);
4588 int const_bounds_p
= (host_integerp (TYPE_MIN_VALUE (domain
), 0)
4589 && host_integerp (TYPE_MAX_VALUE (domain
), 0));
4590 HOST_WIDE_INT minelt
;
4591 HOST_WIDE_INT maxelt
;
4593 /* If we have constant bounds for the range of the type, get them. */
4596 minelt
= tree_low_cst (TYPE_MIN_VALUE (domain
), 0);
4597 maxelt
= tree_low_cst (TYPE_MAX_VALUE (domain
), 0);
      /* If the constructor has fewer elements than the array,
         clear the whole array first.  Similarly if this is a
         static constructor of a non-BLKmode object.  */
4603 if (cleared
|| (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)))
4607 HOST_WIDE_INT count
= 0, zero_count
= 0;
4608 need_to_clear
= ! const_bounds_p
;
4610 /* This loop is a more accurate version of the loop in
4611 mostly_zeros_p (it handles RANGE_EXPR in an index).
4612 It is also needed to check for missing elements. */
4613 for (elt
= CONSTRUCTOR_ELTS (exp
);
4614 elt
!= NULL_TREE
&& ! need_to_clear
;
4615 elt
= TREE_CHAIN (elt
))
4617 tree index
= TREE_PURPOSE (elt
);
4618 HOST_WIDE_INT this_node_count
;
4620 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4622 tree lo_index
= TREE_OPERAND (index
, 0);
4623 tree hi_index
= TREE_OPERAND (index
, 1);
4625 if (! host_integerp (lo_index
, 1)
4626 || ! host_integerp (hi_index
, 1))
4632 this_node_count
= (tree_low_cst (hi_index
, 1)
4633 - tree_low_cst (lo_index
, 1) + 1);
4636 this_node_count
= 1;
4638 count
+= this_node_count
;
4639 if (mostly_zeros_p (TREE_VALUE (elt
)))
4640 zero_count
+= this_node_count
;
4643 /* Clear the entire array first if there are any missing elements,
4644 or if the incidence of zero elements is >= 75%. */
4646 && (count
< maxelt
- minelt
+ 1 || 4 * zero_count
>= 3 * count
))
4650 if (need_to_clear
&& size
> 0)
4653 clear_storage (target
, GEN_INT (size
), align
);
4657 /* Inform later passes that the old value is dead. */
4658 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4660 /* Store each element of the constructor into
4661 the corresponding element of TARGET, determined
4662 by counting the elements. */
4663 for (elt
= CONSTRUCTOR_ELTS (exp
), i
= 0;
4665 elt
= TREE_CHAIN (elt
), i
++)
4667 register enum machine_mode mode
;
4668 HOST_WIDE_INT bitsize
;
4669 HOST_WIDE_INT bitpos
;
4671 tree value
= TREE_VALUE (elt
);
4672 unsigned int align
= TYPE_ALIGN (TREE_TYPE (value
));
4673 tree index
= TREE_PURPOSE (elt
);
4674 rtx xtarget
= target
;
4676 if (cleared
&& is_zeros_p (value
))
4679 unsignedp
= TREE_UNSIGNED (elttype
);
4680 mode
= TYPE_MODE (elttype
);
4681 if (mode
== BLKmode
)
4682 bitsize
= (host_integerp (TYPE_SIZE (elttype
), 1)
4683 ? tree_low_cst (TYPE_SIZE (elttype
), 1)
4686 bitsize
= GET_MODE_BITSIZE (mode
);
4688 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
4690 tree lo_index
= TREE_OPERAND (index
, 0);
4691 tree hi_index
= TREE_OPERAND (index
, 1);
4692 rtx index_r
, pos_rtx
, addr
, hi_r
, loop_top
, loop_end
;
4693 struct nesting
*loop
;
4694 HOST_WIDE_INT lo
, hi
, count
;
4697 /* If the range is constant and "small", unroll the loop. */
4699 && host_integerp (lo_index
, 0)
4700 && host_integerp (hi_index
, 0)
4701 && (lo
= tree_low_cst (lo_index
, 0),
4702 hi
= tree_low_cst (hi_index
, 0),
4703 count
= hi
- lo
+ 1,
4704 (GET_CODE (target
) != MEM
4706 || (host_integerp (TYPE_SIZE (elttype
), 1)
4707 && (tree_low_cst (TYPE_SIZE (elttype
), 1) * count
4710 lo
-= minelt
; hi
-= minelt
;
4711 for (; lo
<= hi
; lo
++)
4713 bitpos
= lo
* tree_low_cst (TYPE_SIZE (elttype
), 0);
4714 store_constructor_field
4715 (target
, bitsize
, bitpos
, mode
, value
, type
, align
,
4717 TYPE_NONALIASED_COMPONENT (type
)
4718 ? MEM_ALIAS_SET (target
) : get_alias_set (elttype
));
4723 hi_r
= expand_expr (hi_index
, NULL_RTX
, VOIDmode
, 0);
4724 loop_top
= gen_label_rtx ();
4725 loop_end
= gen_label_rtx ();
4727 unsignedp
= TREE_UNSIGNED (domain
);
4729 index
= build_decl (VAR_DECL
, NULL_TREE
, domain
);
4732 = gen_reg_rtx (promote_mode (domain
, DECL_MODE (index
),
4734 SET_DECL_RTL (index
, index_r
);
4735 if (TREE_CODE (value
) == SAVE_EXPR
4736 && SAVE_EXPR_RTL (value
) == 0)
4738 /* Make sure value gets expanded once before the
4740 expand_expr (value
, const0_rtx
, VOIDmode
, 0);
4743 store_expr (lo_index
, index_r
, 0);
4744 loop
= expand_start_loop (0);
4746 /* Assign value to element index. */
4748 = convert (ssizetype
,
4749 fold (build (MINUS_EXPR
, TREE_TYPE (index
),
4750 index
, TYPE_MIN_VALUE (domain
))));
4751 position
= size_binop (MULT_EXPR
, position
,
4753 TYPE_SIZE_UNIT (elttype
)));
4755 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4756 addr
= gen_rtx_PLUS (Pmode
, XEXP (target
, 0), pos_rtx
);
4757 xtarget
= change_address (target
, mode
, addr
);
4758 if (TREE_CODE (value
) == CONSTRUCTOR
)
4759 store_constructor (value
, xtarget
, align
, cleared
,
4760 bitsize
/ BITS_PER_UNIT
);
4762 store_expr (value
, xtarget
, 0);
4764 expand_exit_loop_if_false (loop
,
4765 build (LT_EXPR
, integer_type_node
,
4768 expand_increment (build (PREINCREMENT_EXPR
,
4770 index
, integer_one_node
), 0, 0);
4772 emit_label (loop_end
);
4775 else if ((index
!= 0 && ! host_integerp (index
, 0))
4776 || ! host_integerp (TYPE_SIZE (elttype
), 1))
4782 index
= ssize_int (1);
4785 index
= convert (ssizetype
,
4786 fold (build (MINUS_EXPR
, index
,
4787 TYPE_MIN_VALUE (domain
))));
4789 position
= size_binop (MULT_EXPR
, index
,
4791 TYPE_SIZE_UNIT (elttype
)));
4792 pos_rtx
= expand_expr (position
, 0, VOIDmode
, 0);
4793 addr
= gen_rtx_PLUS (Pmode
, XEXP (target
, 0), pos_rtx
);
4794 xtarget
= change_address (target
, mode
, addr
);
4795 store_expr (value
, xtarget
, 0);
4800 bitpos
= ((tree_low_cst (index
, 0) - minelt
)
4801 * tree_low_cst (TYPE_SIZE (elttype
), 1));
4803 bitpos
= (i
* tree_low_cst (TYPE_SIZE (elttype
), 1));
4805 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
4806 type
, align
, cleared
,
4807 TYPE_NONALIASED_COMPONENT (type
)
4808 && GET_CODE (target
) == MEM
4809 ? MEM_ALIAS_SET (target
) :
4810 get_alias_set (elttype
));
4816 /* Set constructor assignments. */
4817 else if (TREE_CODE (type
) == SET_TYPE
)
4819 tree elt
= CONSTRUCTOR_ELTS (exp
);
4820 unsigned HOST_WIDE_INT nbytes
= int_size_in_bytes (type
), nbits
;
4821 tree domain
= TYPE_DOMAIN (type
);
4822 tree domain_min
, domain_max
, bitlength
;
      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the set (using bzero/memset), and then
         set the bits we want.  */
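      /* For example, for a Pascal-style set constructed as [1, 3..5, lo..hi],
         the constant bits are assembled into words below and stored with
         ordinary move insns; each remaining range is then OR'd in, using
         memset when its bounds are constants falling on byte boundaries
         and the __setbits library call otherwise.  (Illustrative example
         only.)  */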
4834 /* Check for all zeros. */
4835 if (elt
== NULL_TREE
&& size
> 0)
4838 clear_storage (target
, GEN_INT (size
), TYPE_ALIGN (type
));
4842 domain_min
= convert (sizetype
, TYPE_MIN_VALUE (domain
));
4843 domain_max
= convert (sizetype
, TYPE_MAX_VALUE (domain
));
4844 bitlength
= size_binop (PLUS_EXPR
,
4845 size_diffop (domain_max
, domain_min
),
4848 nbits
= tree_low_cst (bitlength
, 1);
4850 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4851 are "complicated" (more than one range), initialize (the
4852 constant parts) by copying from a constant. */
4853 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4854 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4856 unsigned int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4857 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4858 char *bit_buffer
= (char *) alloca (nbits
);
4859 HOST_WIDE_INT word
= 0;
4860 unsigned int bit_pos
= 0;
4861 unsigned int ibit
= 0;
4862 unsigned int offset
= 0; /* In bytes from beginning of set. */
4864 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
4867 if (bit_buffer
[ibit
])
4869 if (BYTES_BIG_ENDIAN
)
4870 word
|= (1 << (set_word_size
- 1 - bit_pos
));
4872 word
|= 1 << bit_pos
;
4876 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
4878 if (word
!= 0 || ! cleared
)
4880 rtx datum
= GEN_INT (word
);
4883 /* The assumption here is that it is safe to use
4884 XEXP if the set is multi-word, but not if
4885 it's single-word. */
4886 if (GET_CODE (target
) == MEM
)
4888 to_rtx
= plus_constant (XEXP (target
, 0), offset
);
4889 to_rtx
= change_address (target
, mode
, to_rtx
);
4891 else if (offset
== 0)
4895 emit_move_insn (to_rtx
, datum
);
4902 offset
+= set_word_size
/ BITS_PER_UNIT
;
4907 /* Don't bother clearing storage if the set is all ones. */
4908 if (TREE_CHAIN (elt
) != NULL_TREE
4909 || (TREE_PURPOSE (elt
) == NULL_TREE
4911 : ( ! host_integerp (TREE_VALUE (elt
), 0)
4912 || ! host_integerp (TREE_PURPOSE (elt
), 0)
4913 || (tree_low_cst (TREE_VALUE (elt
), 0)
4914 - tree_low_cst (TREE_PURPOSE (elt
), 0) + 1
4915 != (HOST_WIDE_INT
) nbits
))))
4916 clear_storage (target
, expr_size (exp
), TYPE_ALIGN (type
));
4918 for (; elt
!= NULL_TREE
; elt
= TREE_CHAIN (elt
))
4920 /* Start of range of element or NULL. */
4921 tree startbit
= TREE_PURPOSE (elt
);
4922 /* End of range of element, or element value. */
4923 tree endbit
= TREE_VALUE (elt
);
4924 #ifdef TARGET_MEM_FUNCTIONS
4925 HOST_WIDE_INT startb
, endb
;
4927 rtx bitlength_rtx
, startbit_rtx
, endbit_rtx
, targetx
;
4929 bitlength_rtx
= expand_expr (bitlength
,
4930 NULL_RTX
, MEM
, EXPAND_CONST_ADDRESS
);
4932 /* Handle non-range tuple element like [ expr ]. */
4933 if (startbit
== NULL_TREE
)
4935 startbit
= save_expr (endbit
);
4939 startbit
= convert (sizetype
, startbit
);
4940 endbit
= convert (sizetype
, endbit
);
4941 if (! integer_zerop (domain_min
))
4943 startbit
= size_binop (MINUS_EXPR
, startbit
, domain_min
);
4944 endbit
= size_binop (MINUS_EXPR
, endbit
, domain_min
);
4946 startbit_rtx
= expand_expr (startbit
, NULL_RTX
, MEM
,
4947 EXPAND_CONST_ADDRESS
);
4948 endbit_rtx
= expand_expr (endbit
, NULL_RTX
, MEM
,
4949 EXPAND_CONST_ADDRESS
);
4955 ((build_qualified_type (type_for_mode (GET_MODE (target
), 0),
4958 emit_move_insn (targetx
, target
);
4961 else if (GET_CODE (target
) == MEM
)
4966 #ifdef TARGET_MEM_FUNCTIONS
4967 /* Optimization: If startbit and endbit are
4968 constants divisible by BITS_PER_UNIT,
4969 call memset instead. */
4970 if (TREE_CODE (startbit
) == INTEGER_CST
4971 && TREE_CODE (endbit
) == INTEGER_CST
4972 && (startb
= TREE_INT_CST_LOW (startbit
)) % BITS_PER_UNIT
== 0
4973 && (endb
= TREE_INT_CST_LOW (endbit
) + 1) % BITS_PER_UNIT
== 0)
4975 emit_library_call (memset_libfunc
, LCT_NORMAL
,
4977 plus_constant (XEXP (targetx
, 0),
4978 startb
/ BITS_PER_UNIT
),
4980 constm1_rtx
, TYPE_MODE (integer_type_node
),
4981 GEN_INT ((endb
- startb
) / BITS_PER_UNIT
),
4982 TYPE_MODE (sizetype
));
4986 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__setbits"),
4987 LCT_NORMAL
, VOIDmode
, 4, XEXP (targetx
, 0),
4988 Pmode
, bitlength_rtx
, TYPE_MODE (sizetype
),
4989 startbit_rtx
, TYPE_MODE (sizetype
),
4990 endbit_rtx
, TYPE_MODE (sizetype
));
4993 emit_move_insn (target
, targetx
);
5001 /* Store the value of EXP (an expression tree)
5002 into a subfield of TARGET which has mode MODE and occupies
5003 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5004 If MODE is VOIDmode, it means that we are storing into a bit-field.
5006 If VALUE_MODE is VOIDmode, return nothing in particular.
5007 UNSIGNEDP is not used in this case.
5009 Otherwise, return an rtx for the value stored. This rtx
5010 has mode VALUE_MODE if that is convenient to do.
5011 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5013 ALIGN is the alignment that TARGET is known to have.
5014 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5016 ALIAS_SET is the alias set for the destination. This value will
5017 (in general) be different from that for TARGET, since TARGET is a
5018 reference to the containing structure. */
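/* For example, assigning to the bit-field in

       struct s { unsigned int f : 3; } *p;
       p->f = 5;

   reaches this function with BITSIZE == 3, BITPOS == 0 and MODE == VOIDmode
   (the field is a bit-field), EXP being the tree for the constant 5; the
   store is then done with store_bit_field below.  (Illustrative example
   only.)  */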
5021 store_field (target
, bitsize
, bitpos
, mode
, exp
, value_mode
,
5022 unsignedp
, align
, total_size
, alias_set
)
5024 HOST_WIDE_INT bitsize
;
5025 HOST_WIDE_INT bitpos
;
5026 enum machine_mode mode
;
5028 enum machine_mode value_mode
;
5031 HOST_WIDE_INT total_size
;
5034 HOST_WIDE_INT width_mask
= 0;
5036 if (TREE_CODE (exp
) == ERROR_MARK
)
5039 /* If we have nothing to store, do nothing unless the expression has
5042 return expand_expr (exp
, const0_rtx
, VOIDmode
, 0);
5044 if (bitsize
< HOST_BITS_PER_WIDE_INT
)
5045 width_mask
= ((HOST_WIDE_INT
) 1 << bitsize
) - 1;
5047 /* If we are storing into an unaligned field of an aligned union that is
5048 in a register, we may have the mode of TARGET being an integer mode but
5049 MODE == BLKmode. In that case, get an aligned object whose size and
5050 alignment are the same as TARGET and store TARGET into it (we can avoid
5051 the store if the field being stored is the entire width of TARGET). Then
5052 call ourselves recursively to store the field into a BLKmode version of
5053 that object. Finally, load from the object into TARGET. This is not
5054 very efficient in general, but should only be slightly more expensive
5055 than the otherwise-required unaligned accesses. Perhaps this can be
5056 cleaned up later. */
5059 && (GET_CODE (target
) == REG
|| GET_CODE (target
) == SUBREG
))
5063 (build_qualified_type (type_for_mode (GET_MODE (target
), 0),
5066 rtx blk_object
= copy_rtx (object
);
5068 PUT_MODE (blk_object
, BLKmode
);
5070 if (bitsize
!= (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (target
)))
5071 emit_move_insn (object
, target
);
5073 store_field (blk_object
, bitsize
, bitpos
, mode
, exp
, VOIDmode
, 0,
5074 align
, total_size
, alias_set
);
5076 /* Even though we aren't returning target, we need to
5077 give it the updated value. */
5078 emit_move_insn (target
, object
);
5083 if (GET_CODE (target
) == CONCAT
)
5085 /* We're storing into a struct containing a single __complex. */
5089 return store_expr (exp
, target
, 0);
5092 /* If the structure is in a register or if the component
5093 is a bit field, we cannot use addressing to access it.
5094 Use bit-field techniques or SUBREG to store in it. */
5096 if (mode
== VOIDmode
5097 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
5098 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
5099 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
5100 || GET_CODE (target
) == REG
5101 || GET_CODE (target
) == SUBREG
5102 /* If the field isn't aligned enough to store as an ordinary memref,
5103 store it as a bit field. */
5104 || (mode
!= BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, align
)
5105 && (align
< GET_MODE_ALIGNMENT (mode
)
5106 || bitpos
% GET_MODE_ALIGNMENT (mode
)))
5107 || (mode
== BLKmode
&& SLOW_UNALIGNED_ACCESS (mode
, align
)
5108 && (TYPE_ALIGN (TREE_TYPE (exp
)) > align
5109 || bitpos
% TYPE_ALIGN (TREE_TYPE (exp
)) != 0))
5110 /* If the RHS and field are a constant size and the size of the
5111 RHS isn't the same size as the bitfield, we must use bitfield
5114 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
5115 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0))
5117 rtx temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
5119 /* If BITSIZE is narrower than the size of the type of EXP
5120 we will be narrowing TEMP. Normally, what's wanted are the
5121 low-order bits. However, if EXP's type is a record and this is
5122 big-endian machine, we want the upper BITSIZE bits. */
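      /* Concretely: if TEMP is an SImode value (32 bits) holding a record
         and BITSIZE is 24, on a big-endian machine the value is shifted
         right by 32 - 24 = 8 bits so that the upper 24 bits, not the lower
         ones, land in the field.  (Illustrative example only.)  */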
5123 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
5124 && bitsize
< GET_MODE_BITSIZE (GET_MODE (temp
))
5125 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
5126 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
5127 size_int (GET_MODE_BITSIZE (GET_MODE (temp
))
5131 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5133 if (mode
!= VOIDmode
&& mode
!= BLKmode
5134 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
5135 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
5137 /* If the modes of TARGET and TEMP are both BLKmode, both
5138 must be in memory and BITPOS must be aligned on a byte
5139 boundary. If so, we simply do a block copy. */
5140 if (GET_MODE (target
) == BLKmode
&& GET_MODE (temp
) == BLKmode
)
5142 unsigned int exp_align
= expr_align (exp
);
5144 if (GET_CODE (target
) != MEM
|| GET_CODE (temp
) != MEM
5145 || bitpos
% BITS_PER_UNIT
!= 0)
5148 target
= change_address (target
, VOIDmode
,
5149 plus_constant (XEXP (target
, 0),
5150 bitpos
/ BITS_PER_UNIT
));
5152 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5153 align
= MIN (exp_align
, align
);
5155 /* Find an alignment that is consistent with the bit position. */
5156 while ((bitpos
% align
) != 0)
5159 emit_block_move (target
, temp
,
5160 bitsize
== -1 ? expr_size (exp
)
5161 : GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
5165 return value_mode
== VOIDmode
? const0_rtx
: target
;
5168 /* Store the value in the bitfield. */
5169 store_bit_field (target
, bitsize
, bitpos
, mode
, temp
, align
, total_size
);
5170 if (value_mode
!= VOIDmode
)
5172 /* The caller wants an rtx for the value. */
5173 /* If possible, avoid refetching from the bitfield itself. */
5175 && ! (GET_CODE (target
) == MEM
&& MEM_VOLATILE_P (target
)))
5178 enum machine_mode tmode
;
5181 return expand_and (temp
, GEN_INT (width_mask
), NULL_RTX
);
5182 tmode
= GET_MODE (temp
);
5183 if (tmode
== VOIDmode
)
5185 count
= build_int_2 (GET_MODE_BITSIZE (tmode
) - bitsize
, 0);
5186 temp
= expand_shift (LSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5187 return expand_shift (RSHIFT_EXPR
, tmode
, temp
, count
, 0, 0);
5189 return extract_bit_field (target
, bitsize
, bitpos
, unsignedp
,
5190 NULL_RTX
, value_mode
, 0, align
,
5197 rtx addr
= XEXP (target
, 0);
5200 /* If a value is wanted, it must be the lhs;
5201 so make the address stable for multiple use. */
5203 if (value_mode
!= VOIDmode
&& GET_CODE (addr
) != REG
5204 && ! CONSTANT_ADDRESS_P (addr
)
5205 /* A frame-pointer reference is already stable. */
5206 && ! (GET_CODE (addr
) == PLUS
5207 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
5208 && (XEXP (addr
, 0) == virtual_incoming_args_rtx
5209 || XEXP (addr
, 0) == virtual_stack_vars_rtx
)))
5210 addr
= copy_to_reg (addr
);
5212 /* Now build a reference to just the desired component. */
5214 to_rtx
= copy_rtx (change_address (target
, mode
,
5215 plus_constant (addr
,
5217 / BITS_PER_UNIT
))));
5218 MEM_SET_IN_STRUCT_P (to_rtx
, 1);
5219 MEM_ALIAS_SET (to_rtx
) = alias_set
;
5221 return store_expr (exp
, to_rtx
, value_mode
!= VOIDmode
);
5225 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5226 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5227 ARRAY_REFs and find the ultimate containing object, which we return.
5229 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5230 bit position, and *PUNSIGNEDP to the signedness of the field.
5231 If the position of the field is variable, we store a tree
5232 giving the variable offset (in units) in *POFFSET.
5233 This offset is in addition to the bit position.
5234 If the position is not variable, we store 0 in *POFFSET.
   We set *PALIGNMENT to the alignment of the address that will be
   computed.  This is the alignment of the thing we return if *POFFSET
   is zero, but can be less strictly aligned if *POFFSET is nonzero.
5239 If any of the extraction expressions is volatile,
5240 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5242 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5243 is a mode that can be used to access the field. In that case, *PBITSIZE
5246 If the field describes a variable-sized object, *PMODE is set to
5247 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5248 this case, but the address of the object can be found. */
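/* For example, for the C reference `s.a[i].b' this function returns the
   VAR_DECL for `s'; *PBITSIZE and *PBITPOS describe the field `b', and
   since the array index is variable the term `i * sizeof (s.a[0])' comes
   back in *POFFSET rather than being folded into *PBITPOS.  (Illustrative
   example only.)  */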
5251 get_inner_reference (exp
, pbitsize
, pbitpos
, poffset
, pmode
,
5252 punsignedp
, pvolatilep
, palignment
)
5254 HOST_WIDE_INT
*pbitsize
;
5255 HOST_WIDE_INT
*pbitpos
;
5257 enum machine_mode
*pmode
;
5260 unsigned int *palignment
;
5263 enum machine_mode mode
= VOIDmode
;
5264 tree offset
= size_zero_node
;
5265 tree bit_offset
= bitsize_zero_node
;
5266 unsigned int alignment
= BIGGEST_ALIGNMENT
;
5269 /* First get the mode, signedness, and size. We do this from just the
5270 outermost expression. */
5271 if (TREE_CODE (exp
) == COMPONENT_REF
)
5273 size_tree
= DECL_SIZE (TREE_OPERAND (exp
, 1));
5274 if (! DECL_BIT_FIELD (TREE_OPERAND (exp
, 1)))
5275 mode
= DECL_MODE (TREE_OPERAND (exp
, 1));
5277 *punsignedp
= TREE_UNSIGNED (TREE_OPERAND (exp
, 1));
5279 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5281 size_tree
= TREE_OPERAND (exp
, 1);
5282 *punsignedp
= TREE_UNSIGNED (exp
);
5286 mode
= TYPE_MODE (TREE_TYPE (exp
));
5287 *punsignedp
= TREE_UNSIGNED (TREE_TYPE (exp
));
5289 if (mode
== BLKmode
)
5290 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
5292 *pbitsize
= GET_MODE_BITSIZE (mode
);
5297 if (! host_integerp (size_tree
, 1))
5298 mode
= BLKmode
, *pbitsize
= -1;
5300 *pbitsize
= tree_low_cst (size_tree
, 1);
5303 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5304 and find the ultimate containing object. */
5307 if (TREE_CODE (exp
) == BIT_FIELD_REF
)
5308 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
, TREE_OPERAND (exp
, 2));
5309 else if (TREE_CODE (exp
) == COMPONENT_REF
)
5311 tree field
= TREE_OPERAND (exp
, 1);
5312 tree this_offset
= DECL_FIELD_OFFSET (field
);
5314 /* If this field hasn't been filled in yet, don't go
5315 past it. This should only happen when folding expressions
5316 made during type construction. */
5317 if (this_offset
== 0)
5319 else if (! TREE_CONSTANT (this_offset
)
5320 && contains_placeholder_p (this_offset
))
5321 this_offset
= build (WITH_RECORD_EXPR
, sizetype
, this_offset
, exp
);
5323 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
5324 bit_offset
= size_binop (PLUS_EXPR
, bit_offset
,
5325 DECL_FIELD_BIT_OFFSET (field
));
5327 if (! host_integerp (offset
, 0))
5328 alignment
= MIN (alignment
, DECL_OFFSET_ALIGN (field
));
5331 else if (TREE_CODE (exp
) == ARRAY_REF
)
5333 tree index
= TREE_OPERAND (exp
, 1);
5334 tree domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5335 tree low_bound
= (domain
? TYPE_MIN_VALUE (domain
) : 0);
5336 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (exp
));
            /* We assume all arrays have sizes that are a multiple of a byte.
               First subtract the lower bound, if any, in the type of the
               index, then convert to sizetype and multiply by the size of
               the element.  */
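            /* That is, the byte offset contributed by this ARRAY_REF is
               (index - low_bound) * unit_size, where unit_size is the
               TYPE_SIZE_UNIT of the element type; this product is added
               into OFFSET below.  */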
5342 if (low_bound
!= 0 && ! integer_zerop (low_bound
))
5343 index
= fold (build (MINUS_EXPR
, TREE_TYPE (index
),
            /* If the index has a self-referential type, pass it to a
               WITH_RECORD_EXPR; if the component size is self-referential,
               pass our component to one.  */
5349 if (! TREE_CONSTANT (index
)
5350 && contains_placeholder_p (index
))
5351 index
= build (WITH_RECORD_EXPR
, TREE_TYPE (index
), index
, exp
);
5352 if (! TREE_CONSTANT (unit_size
)
5353 && contains_placeholder_p (unit_size
))
5354 unit_size
= build (WITH_RECORD_EXPR
, sizetype
, unit_size
,
5355 TREE_OPERAND (exp
, 0));
5357 offset
= size_binop (PLUS_EXPR
, offset
,
5358 size_binop (MULT_EXPR
,
5359 convert (sizetype
, index
),
5363 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
5364 && ! ((TREE_CODE (exp
) == NOP_EXPR
5365 || TREE_CODE (exp
) == CONVERT_EXPR
)
5366 && (TYPE_MODE (TREE_TYPE (exp
))
5367 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
5370 /* If any reference in the chain is volatile, the effect is volatile. */
5371 if (TREE_THIS_VOLATILE (exp
))
5374 /* If the offset is non-constant already, then we can't assume any
5375 alignment more than the alignment here. */
5376 if (! TREE_CONSTANT (offset
))
5377 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5379 exp
= TREE_OPERAND (exp
, 0);
5383 alignment
= MIN (alignment
, DECL_ALIGN (exp
));
5384 else if (TREE_TYPE (exp
) != 0)
5385 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
5387 /* If OFFSET is constant, see if we can return the whole thing as a
5388 constant bit position. Otherwise, split it up. */
5389 if (host_integerp (offset
, 0)
5390 && 0 != (tem
= size_binop (MULT_EXPR
, convert (bitsizetype
, offset
),
5392 && 0 != (tem
= size_binop (PLUS_EXPR
, tem
, bit_offset
))
5393 && host_integerp (tem
, 0))
5394 *pbitpos
= tree_low_cst (tem
, 0), *poffset
= 0;
5396 *pbitpos
= tree_low_cst (bit_offset
, 0), *poffset
= offset
;
5399 *palignment
= alignment
;
/* Subroutine of expand_expr: compute memory_usage from modifier.  */
5405 static enum memory_use_mode
5406 get_memory_usage_from_modifier (modifier
)
5407 enum expand_modifier modifier
;
5413 return MEMORY_USE_RO
;
5415 case EXPAND_MEMORY_USE_WO
:
5416 return MEMORY_USE_WO
;
5418 case EXPAND_MEMORY_USE_RW
:
5419 return MEMORY_USE_RW
;
5421 case EXPAND_MEMORY_USE_DONT
:
5422 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5423 MEMORY_USE_DONT, because they are modifiers to a call of
5424 expand_expr in the ADDR_EXPR case of expand_expr. */
5425 case EXPAND_CONST_ADDRESS
:
5426 case EXPAND_INITIALIZER
:
5427 return MEMORY_USE_DONT
;
5428 case EXPAND_MEMORY_USE_BAD
:
5434 /* Given an rtx VALUE that may contain additions and multiplications, return
5435 an equivalent value that just refers to a register, memory, or constant.
5436 This is done by generating instructions to perform the arithmetic and
5437 returning a pseudo-register containing the value.
5439 The returned value may be a REG, SUBREG, MEM or constant. */
5442 force_operand (value
, target
)
5445 register optab binoptab
= 0;
5446 /* Use a temporary to force order of execution of calls to
5450 /* Use subtarget as the target for operand 0 of a binary operation. */
5451 register rtx subtarget
= get_subtarget (target
);
5453 /* Check for a PIC address load. */
5455 && (GET_CODE (value
) == PLUS
|| GET_CODE (value
) == MINUS
)
5456 && XEXP (value
, 0) == pic_offset_table_rtx
5457 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
5458 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
5459 || GET_CODE (XEXP (value
, 1)) == CONST
))
5462 subtarget
= gen_reg_rtx (GET_MODE (value
));
5463 emit_move_insn (subtarget
, value
);
5467 if (GET_CODE (value
) == PLUS
)
5468 binoptab
= add_optab
;
5469 else if (GET_CODE (value
) == MINUS
)
5470 binoptab
= sub_optab
;
5471 else if (GET_CODE (value
) == MULT
)
5473 op2
= XEXP (value
, 1);
5474 if (!CONSTANT_P (op2
)
5475 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5477 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5478 return expand_mult (GET_MODE (value
), tmp
,
5479 force_operand (op2
, NULL_RTX
),
5485 op2
= XEXP (value
, 1);
5486 if (!CONSTANT_P (op2
)
5487 && !(GET_CODE (op2
) == REG
&& op2
!= subtarget
))
5489 if (binoptab
== sub_optab
&& GET_CODE (op2
) == CONST_INT
)
5491 binoptab
= add_optab
;
5492 op2
= negate_rtx (GET_MODE (value
), op2
);
5495 /* Check for an addition with OP2 a constant integer and our first
5496 operand a PLUS of a virtual register and something else. In that
5497 case, we want to emit the sum of the virtual register and the
5498 constant first and then add the other value. This allows virtual
5499 register instantiation to simply modify the constant rather than
5500 creating another one around this addition. */
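      /* For example, for the rtx

             (plus (plus (reg virtual-stack-vars) (reg N)) (const_int 16))

         we emit `virtual-stack-vars + 16' first, which virtual register
         instantiation can later rewrite as a single frame-pointer offset,
         and only then add in (reg N).  (Illustrative example only; N is a
         hypothetical pseudo.)  */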
5501 if (binoptab
== add_optab
&& GET_CODE (op2
) == CONST_INT
5502 && GET_CODE (XEXP (value
, 0)) == PLUS
5503 && GET_CODE (XEXP (XEXP (value
, 0), 0)) == REG
5504 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5505 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
5507 rtx temp
= expand_binop (GET_MODE (value
), binoptab
,
5508 XEXP (XEXP (value
, 0), 0), op2
,
5509 subtarget
, 0, OPTAB_LIB_WIDEN
);
5510 return expand_binop (GET_MODE (value
), binoptab
, temp
,
5511 force_operand (XEXP (XEXP (value
, 0), 1), 0),
5512 target
, 0, OPTAB_LIB_WIDEN
);
5515 tmp
= force_operand (XEXP (value
, 0), subtarget
);
5516 return expand_binop (GET_MODE (value
), binoptab
, tmp
,
5517 force_operand (op2
, NULL_RTX
),
5518 target
, 0, OPTAB_LIB_WIDEN
);
5519 /* We give UNSIGNEDP = 0 to expand_binop
5520 because the only operations we are expanding here are signed ones. */
5525 /* Subroutine of expand_expr:
5526 save the non-copied parts (LIST) of an expr (LHS), and return a list
5527 which can restore these values to their previous values,
5528 should something modify their storage. */
5531 save_noncopied_parts (lhs
, list
)
5538 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
5539 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
5540 parts
= chainon (parts
, save_noncopied_parts (lhs
, TREE_VALUE (tail
)));
5543 tree part
= TREE_VALUE (tail
);
5544 tree part_type
= TREE_TYPE (part
);
5545 tree to_be_saved
= build (COMPONENT_REF
, part_type
, lhs
, part
);
5547 = assign_temp (build_qualified_type (part_type
,
5548 (TYPE_QUALS (part_type
)
5549 | TYPE_QUAL_CONST
)),
5552 if (! memory_address_p (TYPE_MODE (part_type
), XEXP (target
, 0)))
5553 target
= change_address (target
, TYPE_MODE (part_type
), NULL_RTX
);
5554 parts
= tree_cons (to_be_saved
,
5555 build (RTL_EXPR
, part_type
, NULL_TREE
,
5558 store_expr (TREE_PURPOSE (parts
), RTL_EXPR_RTL (TREE_VALUE (parts
)), 0);
5563 /* Subroutine of expand_expr:
5564 record the non-copied parts (LIST) of an expr (LHS), and return a list
5565 which specifies the initial values of these parts. */
5568 init_noncopied_parts (lhs
, list
)
5575 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
5576 if (TREE_CODE (TREE_VALUE (tail
)) == TREE_LIST
)
5577 parts
= chainon (parts
, init_noncopied_parts (lhs
, TREE_VALUE (tail
)));
5578 else if (TREE_PURPOSE (tail
))
5580 tree part
= TREE_VALUE (tail
);
5581 tree part_type
= TREE_TYPE (part
);
5582 tree to_be_initialized
= build (COMPONENT_REF
, part_type
, lhs
, part
);
5583 parts
= tree_cons (TREE_PURPOSE (tail
), to_be_initialized
, parts
);
5588 /* Subroutine of expand_expr: return nonzero iff there is no way that
5589 EXP can reference X, which is being modified. TOP_P is nonzero if this
5590 call is going to be used to determine whether we need a temporary
5591 for EXP, as opposed to a recursive call to this function.
5593 It is always safe for this routine to return zero since it merely
5594 searches for optimization opportunities. */
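/* For example, when expanding `a = b + f (b)' (for some function f), the
   expander asks whether the rtx holding `a' may be used as scratch storage
   while the right-hand side is computed; if `a' lives in memory or a hard
   register, the call might read or clobber that location, so safe_from_p
   conservatively answers zero and a separate temporary is used instead.
   (Illustrative example only.)  */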
5597 safe_from_p (x
, exp
, top_p
)
5604 static tree save_expr_list
;
5607 /* If EXP has varying size, we MUST use a target since we currently
5608 have no way of allocating temporaries of variable size
5609 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5610 So we assume here that something at a higher level has prevented a
5611 clash. This is somewhat bogus, but the best we can do. Only
5612 do this when X is BLKmode and when we are at the top level. */
5613 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
5614 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
5615 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
5616 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
5617 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
5619 && GET_MODE (x
) == BLKmode
)
5620 /* If X is in the outgoing argument area, it is always safe. */
5621 || (GET_CODE (x
) == MEM
5622 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
5623 || (GET_CODE (XEXP (x
, 0)) == PLUS
5624 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
5627 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5628 find the underlying pseudo. */
5629 if (GET_CODE (x
) == SUBREG
)
5632 if (GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5636 /* A SAVE_EXPR might appear many times in the expression passed to the
5637 top-level safe_from_p call, and if it has a complex subexpression,
5638 examining it multiple times could result in a combinatorial explosion.
5639 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5640 with optimization took about 28 minutes to compile -- even though it was
5641 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5642 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5643 we have processed. Note that the only test of top_p was above. */
5652 rtn
= safe_from_p (x
, exp
, 0);
5654 for (t
= save_expr_list
; t
!= 0; t
= TREE_CHAIN (t
))
5655 TREE_PRIVATE (TREE_PURPOSE (t
)) = 0;
5660 /* Now look at our tree code and possibly recurse. */
5661 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
5664 exp_rtl
= DECL_RTL_SET_P (exp
) ? DECL_RTL (exp
) : NULL_RTX
;
5671 if (TREE_CODE (exp
) == TREE_LIST
)
5672 return ((TREE_VALUE (exp
) == 0
5673 || safe_from_p (x
, TREE_VALUE (exp
), 0))
5674 && (TREE_CHAIN (exp
) == 0
5675 || safe_from_p (x
, TREE_CHAIN (exp
), 0)));
5676 else if (TREE_CODE (exp
) == ERROR_MARK
)
5677 return 1; /* An already-visited SAVE_EXPR? */
5682 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5686 return (safe_from_p (x
, TREE_OPERAND (exp
, 0), 0)
5687 && safe_from_p (x
, TREE_OPERAND (exp
, 1), 0));
5691 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5692 the expression. If it is set, we conflict iff we are that rtx or
5693 both are in memory. Otherwise, we check all operands of the
5694 expression recursively. */
5696 switch (TREE_CODE (exp
))
5699 return (staticp (TREE_OPERAND (exp
, 0))
5700 || TREE_STATIC (exp
)
5701 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0));
5704 if (GET_CODE (x
) == MEM
5705 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
5706 get_alias_set (exp
)))
5711 /* Assume that the call will clobber all hard registers and
5713 if ((GET_CODE (x
) == REG
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
)
5714 || GET_CODE (x
) == MEM
)
5719 /* If a sequence exists, we would have to scan every instruction
5720 in the sequence to see if it was safe. This is probably not
5722 if (RTL_EXPR_SEQUENCE (exp
))
5725 exp_rtl
= RTL_EXPR_RTL (exp
);
5728 case WITH_CLEANUP_EXPR
:
5729 exp_rtl
= RTL_EXPR_RTL (exp
);
5732 case CLEANUP_POINT_EXPR
:
5733 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
5736 exp_rtl
= SAVE_EXPR_RTL (exp
);
5740 /* If we've already scanned this, don't do it again. Otherwise,
5741 show we've scanned it and record for clearing the flag if we're
5743 if (TREE_PRIVATE (exp
))
5746 TREE_PRIVATE (exp
) = 1;
5747 if (! safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
5749 TREE_PRIVATE (exp
) = 0;
5753 save_expr_list
= tree_cons (exp
, NULL_TREE
, save_expr_list
);
5757 /* The only operand we look at is operand 1. The rest aren't
5758 part of the expression. */
5759 return safe_from_p (x
, TREE_OPERAND (exp
, 1), 0);
5761 case METHOD_CALL_EXPR
:
5762 /* This takes a rtx argument, but shouldn't appear here. */
5769 /* If we have an rtx, we do not need to scan our operands. */
5773 nops
= first_rtl_op (TREE_CODE (exp
));
5774 for (i
= 0; i
< nops
; i
++)
5775 if (TREE_OPERAND (exp
, i
) != 0
5776 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
5779 /* If this is a language-specific tree code, it may require
5780 special handling. */
5781 if ((unsigned int) TREE_CODE (exp
)
5782 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5784 && !(*lang_safe_from_p
) (x
, exp
))
5788 /* If we have an rtl, find any enclosed object. Then see if we conflict
5792 if (GET_CODE (exp_rtl
) == SUBREG
)
5794 exp_rtl
= SUBREG_REG (exp_rtl
);
5795 if (GET_CODE (exp_rtl
) == REG
5796 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
  /* If the rtl is X, then it is not safe.  Otherwise, it is safe unless
     both are memory and they conflict.  */
5802 return ! (rtx_equal_p (x
, exp_rtl
)
5803 || (GET_CODE (x
) == MEM
&& GET_CODE (exp_rtl
) == MEM
5804 && true_dependence (exp_rtl
, GET_MODE (x
), x
,
5805 rtx_addr_varies_p
)));
5808 /* If we reach here, it is safe. */
5812 /* Subroutine of expand_expr: return nonzero iff EXP is an
5813 expression whose type is statically determinable. */
5819 if (TREE_CODE (exp
) == PARM_DECL
5820 || TREE_CODE (exp
) == VAR_DECL
5821 || TREE_CODE (exp
) == CALL_EXPR
|| TREE_CODE (exp
) == TARGET_EXPR
5822 || TREE_CODE (exp
) == COMPONENT_REF
5823 || TREE_CODE (exp
) == ARRAY_REF
)
5828 /* Subroutine of expand_expr: return rtx if EXP is a
5829 variable or parameter; else return 0. */
5836 switch (TREE_CODE (exp
))
5840 return DECL_RTL (exp
);
5846 #ifdef MAX_INTEGER_COMPUTATION_MODE
5849 check_max_integer_computation_mode (exp
)
5852 enum tree_code code
;
5853 enum machine_mode mode
;
5855 /* Strip any NOPs that don't change the mode. */
5857 code
= TREE_CODE (exp
);
5859 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5860 if (code
== NOP_EXPR
5861 && TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
)
5864 /* First check the type of the overall operation. We need only look at
5865 unary, binary and relational operations. */
5866 if (TREE_CODE_CLASS (code
) == '1'
5867 || TREE_CODE_CLASS (code
) == '2'
5868 || TREE_CODE_CLASS (code
) == '<')
5870 mode
= TYPE_MODE (TREE_TYPE (exp
));
5871 if (GET_MODE_CLASS (mode
) == MODE_INT
5872 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5873 internal_error ("unsupported wide integer operation");
5876 /* Check operand of a unary op. */
5877 if (TREE_CODE_CLASS (code
) == '1')
5879 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5880 if (GET_MODE_CLASS (mode
) == MODE_INT
5881 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5882 internal_error ("unsupported wide integer operation");
5885 /* Check operands of a binary/comparison op. */
5886 if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<')
5888 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
5889 if (GET_MODE_CLASS (mode
) == MODE_INT
5890 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5891 internal_error ("unsupported wide integer operation");
5893 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
5894 if (GET_MODE_CLASS (mode
) == MODE_INT
5895 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
5896 internal_error ("unsupported wide integer operation");
5901 /* expand_expr: generate code for computing expression EXP.
5902 An rtx for the computed value is returned. The value is never null.
5903 In the case of a void EXP, const0_rtx is returned.
5905 The value may be stored in TARGET if TARGET is nonzero.
5906 TARGET is just a suggestion; callers must assume that
5907 the rtx returned may not be the same as TARGET.
5909 If TARGET is CONST0_RTX, it means that the value will be ignored.
5911 If TMODE is not VOIDmode, it suggests generating the
5912 result in mode TMODE. But this is done only when convenient.
5913 Otherwise, TMODE is ignored and the value generated in its natural mode.
5914 TMODE is just a suggestion; callers must assume that
5915 the rtx returned may not have mode TMODE.
5917 Note that TARGET may have neither TMODE nor MODE. In that case, it
5918 probably will not be used.
5920 If MODIFIER is EXPAND_SUM then when EXP is an addition
5921 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5922 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5923 products as above, or REG or MEM, or constant.
5924 Ordinarily in such cases we would output mul or add instructions
5925 and then return a pseudo reg containing the sum.
5927 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5928 it also marks a label as absolutely required (it can't be dead).
5929 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5930 This is used for outputting expressions used in initializers.
5932 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5933 with a constant address even if that address is not normally legitimate.
5934 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
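/* A typical caller simply does

       rtx op0 = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   letting the expression pick its own mode and temporary, whereas callers
   that are computing addresses pass EXPAND_SUM so that a partially reduced
   (PLUS ...) form may come back instead of a pseudo register, as described
   above.  (Illustrative usage sketch only.)  */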
5937 expand_expr (exp
, target
, tmode
, modifier
)
5940 enum machine_mode tmode
;
5941 enum expand_modifier modifier
;
5943 register rtx op0
, op1
, temp
;
5944 tree type
= TREE_TYPE (exp
);
5945 int unsignedp
= TREE_UNSIGNED (type
);
5946 register enum machine_mode mode
;
5947 register enum tree_code code
= TREE_CODE (exp
);
5949 rtx subtarget
, original_target
;
5952 /* Used by check-memory-usage to make modifier read only. */
5953 enum expand_modifier ro_modifier
;
5955 /* Handle ERROR_MARK before anybody tries to access its type. */
5956 if (TREE_CODE (exp
) == ERROR_MARK
|| TREE_CODE (type
) == ERROR_MARK
)
5958 op0
= CONST0_RTX (tmode
);
5964 mode
= TYPE_MODE (type
);
5965 /* Use subtarget as the target for operand 0 of a binary operation. */
5966 subtarget
= get_subtarget (target
);
5967 original_target
= target
;
5968 ignore
= (target
== const0_rtx
5969 || ((code
== NON_LVALUE_EXPR
|| code
== NOP_EXPR
5970 || code
== CONVERT_EXPR
|| code
== REFERENCE_EXPR
5971 || code
== COND_EXPR
)
5972 && TREE_CODE (type
) == VOID_TYPE
));
5974 /* Make a read-only version of the modifier. */
5975 if (modifier
== EXPAND_NORMAL
|| modifier
== EXPAND_SUM
5976 || modifier
== EXPAND_CONST_ADDRESS
|| modifier
== EXPAND_INITIALIZER
)
5977 ro_modifier
= modifier
;
5979 ro_modifier
= EXPAND_NORMAL
;
5981 /* If we are going to ignore this result, we need only do something
5982 if there is a side-effect somewhere in the expression. If there
5983 is, short-circuit the most common cases here. Note that we must
5984 not call expand_expr with anything but const0_rtx in case this
5985 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5989 if (! TREE_SIDE_EFFECTS (exp
))
5992 /* Ensure we reference a volatile object even if value is ignored, but
5993 don't do this if all we are doing is taking its address. */
5994 if (TREE_THIS_VOLATILE (exp
)
5995 && TREE_CODE (exp
) != FUNCTION_DECL
5996 && mode
!= VOIDmode
&& mode
!= BLKmode
5997 && modifier
!= EXPAND_CONST_ADDRESS
)
5999 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, ro_modifier
);
6000 if (GET_CODE (temp
) == MEM
)
6001 temp
= copy_to_reg (temp
);
6005 if (TREE_CODE_CLASS (code
) == '1' || code
== COMPONENT_REF
6006 || code
== INDIRECT_REF
|| code
== BUFFER_REF
)
6007 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
6008 VOIDmode
, ro_modifier
);
6009 else if (TREE_CODE_CLASS (code
) == '2' || TREE_CODE_CLASS (code
) == '<'
6010 || code
== ARRAY_REF
)
6012 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, ro_modifier
);
6013 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, ro_modifier
);
6016 else if ((code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
6017 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 1)))
6018 /* If the second operand has no side effects, just evaluate
6020 return expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
,
6021 VOIDmode
, ro_modifier
);
6022 else if (code
== BIT_FIELD_REF
)
6024 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, ro_modifier
);
6025 expand_expr (TREE_OPERAND (exp
, 1), const0_rtx
, VOIDmode
, ro_modifier
);
6026 expand_expr (TREE_OPERAND (exp
, 2), const0_rtx
, VOIDmode
, ro_modifier
);
6033 #ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */
6039 && GET_MODE (target
) != mode
6040 && TREE_CODE (exp
) != INTEGER_CST
6041 && TREE_CODE (exp
) != PARM_DECL
6042 && TREE_CODE (exp
) != ARRAY_REF
6043 && TREE_CODE (exp
) != COMPONENT_REF
6044 && TREE_CODE (exp
) != BIT_FIELD_REF
6045 && TREE_CODE (exp
) != INDIRECT_REF
6046 && TREE_CODE (exp
) != CALL_EXPR
6047 && TREE_CODE (exp
) != VAR_DECL
6048 && TREE_CODE (exp
) != RTL_EXPR
)
6050 enum machine_mode mode
= GET_MODE (target
);
6052 if (GET_MODE_CLASS (mode
) == MODE_INT
6053 && mode
> MAX_INTEGER_COMPUTATION_MODE
)
6054 internal_error ("unsupported wide integer operation");
6058 && TREE_CODE (exp
) != INTEGER_CST
6059 && TREE_CODE (exp
) != PARM_DECL
6060 && TREE_CODE (exp
) != ARRAY_REF
6061 && TREE_CODE (exp
) != COMPONENT_REF
6062 && TREE_CODE (exp
) != BIT_FIELD_REF
6063 && TREE_CODE (exp
) != INDIRECT_REF
6064 && TREE_CODE (exp
) != VAR_DECL
6065 && TREE_CODE (exp
) != CALL_EXPR
6066 && TREE_CODE (exp
) != RTL_EXPR
6067 && GET_MODE_CLASS (tmode
) == MODE_INT
6068 && tmode
> MAX_INTEGER_COMPUTATION_MODE
)
6069 internal_error ("unsupported wide integer operation");
6071 check_max_integer_computation_mode (exp
);
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);

        /* Handle using a label in a containing function.  */
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          {
            struct function *p = find_function_data (function);
            p->expr->x_forced_labels
              = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
                                   p->expr->x_forced_labels);
          }
        else
          {
            if (modifier == EXPAND_INITIALIZER)
              forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
                                                 label_rtx (exp),
                                                 forced_labels);
          }

        temp = gen_rtx_MEM (FUNCTION_MODE,
                            gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }
    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          layout_decl (exp, 0);
          PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
        }

      /* Although static-storage variables start off initialized, according to
         ANSI C, a memcpy could overwrite them with uninitialized values.  So
         we check them too.  This also lets us check for read-only variables
         accessed via a non-const declaration, in case it won't be detected
         any other way (e.g., in an embedded system or OS kernel without
         memory protection).

         Aggregates are not checked here; they're handled elsewhere.  */
      if (cfun && current_function_check_memory_usage
          && GET_CODE (DECL_RTL (exp)) == MEM
          && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
        {
          enum memory_use_mode memory_usage;
          memory_usage = get_memory_usage_from_modifier (modifier);

          in_check_memory_usage = 1;
          if (memory_usage != MEMORY_USE_DONT)
            emit_library_call (chkr_check_addr_libfunc,
                               LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
                               XEXP (DECL_RTL (exp), 0), Pmode,
                               GEN_INT (int_size_in_bytes (type)),
                               TYPE_MODE (sizetype),
                               GEN_INT (memory_usage),
                               TYPE_MODE (integer_type_node));
          in_check_memory_usage = 0;
        }

      /* ... fall through ...  */
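      /* The emit_library_call of chkr_check_addr_libfunc above is the
         -fcheck-memory-usage instrumentation: before the variable is
         accessed, a call into the checker runtime validates the address
         range covering the object, with IN_CHECK_MEMORY_USAGE set so the
         checking code itself is not instrumented recursively.  */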
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();

      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          if (DECL_NO_STATIC_CHAIN (current_function_decl))
            abort ();
          mark_addressable (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr = change_address (addr, Pmode,
                                   fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);

          temp = change_address (DECL_RTL (exp), mode, addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
                               XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
        temp = change_address (DECL_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

          return temp;
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != mode)
        {
          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          if (GET_MODE (DECL_RTL (exp))
              != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
            abort ();

          temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return DECL_RTL (exp);
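      /* Example of the promoted-value case above: on a target whose
         PROMOTE_MODE widens `short' variables to SImode, DECL_RTL is an
         SImode register even though the declared mode is HImode; the
         (subreg:HI (reg:SI ...)) returned here is marked
         SUBREG_PROMOTED_VAR_P so later code knows the extension already
         happened.  */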
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
                          EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp, 1);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
              || (flag_force_addr
                  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
        return change_address (TREE_CST_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
    case EXPR_WITH_FILE_LOCATION:
      {
        rtx to_return;
        const char *saved_input_filename = input_filename;
        int saved_lineno = lineno;
        input_filename = EXPR_WFL_FILENAME (exp);
        lineno = EXPR_WFL_LINENO (exp);
        if (EXPR_WFL_EMIT_LINE_NOTE (exp))
          emit_line_note (input_filename, lineno);
        /* Possibly avoid switching back and forth here.  */
        to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
        input_filename = saved_input_filename;
        lineno = saved_lineno;
        return to_return;
      }
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
         initialization function and move it into our context.  */
      if (context == 0)
        SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
          /* The following call just exists to abort if the context is
             not of a containing function.  */
          find_function_data (context);

          temp = SAVE_EXPR_RTL (exp);
          if (temp && GET_CODE (temp) == REG)
            {
              put_var_into_stack (exp);
              temp = SAVE_EXPR_RTL (exp);
            }
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return change_address (temp, mode,
                                 fix_lexical_addr (XEXP (temp, 0), exp));
        }

      if (SAVE_EXPR_RTL (exp) == 0)
        {
          if (mode == VOIDmode)
            temp = const0_rtx;
          else
            temp = assign_temp (build_qualified_type (type,
                                                      (TYPE_QUALS (type)
                                                       | TYPE_QUAL_CONST)),
                                3, 0, 0);

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
                                                save_expr_regs);

          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  Note that `unsignedp' was modified above in
             this case.  */

          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
            {
              temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
            }

          if (temp == const0_rtx)
            expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);
          else
            store_expr (TREE_OPERAND (exp, 0), temp, 0);

          TREE_USED (exp) = 1;
        }

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
        {
          /* Compute the signedness and make the proper SUBREG.  */
          promote_mode (type, mode, &unsignedp, 0);
          temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return SAVE_EXPR_RTL (exp);
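      /* A SAVE_EXPR is expanded at most once: the first expansion stores the
         operand's value in SAVE_EXPR_RTL and every later reference returns
         that same RTL, so the operand's side effects happen exactly once.
         The UNSAVE_EXPR case below expands its operand and then resets it
         with unsave_expr_now so that it can be expanded afresh later.  */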
    case UNSAVE_EXPR:
      temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
      TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
      return temp;
    case PLACEHOLDER_EXPR:
      {
        tree placeholder_expr;

        /* If there is an object on the head of the placeholder list,
           see if any object in it is of type TYPE or a pointer to it.  For
           further information, see tree.def.  */
        for (placeholder_expr = placeholder_list;
             placeholder_expr != 0;
             placeholder_expr = TREE_CHAIN (placeholder_expr))
          {
            tree need_type = TYPE_MAIN_VARIANT (type);
            tree object = 0;
            tree old_list = placeholder_list;
            tree elt;

            /* Find the outermost reference that is of the type we want.
               If none, see if any object has a type that is a pointer to
               the type we want.  */
            for (elt = TREE_PURPOSE (placeholder_expr);
                 elt != 0 && object == 0;
                 elt
                 = ((TREE_CODE (elt) == COMPOUND_EXPR
                     || TREE_CODE (elt) == COND_EXPR)
                    ? TREE_OPERAND (elt, 1)
                    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
                    ? TREE_OPERAND (elt, 0) : 0))
              if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
                object = elt;

            for (elt = TREE_PURPOSE (placeholder_expr);
                 elt != 0 && object == 0;
                 elt
                 = ((TREE_CODE (elt) == COMPOUND_EXPR
                     || TREE_CODE (elt) == COND_EXPR)
                    ? TREE_OPERAND (elt, 1)
                    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
                    ? TREE_OPERAND (elt, 0) : 0))
              if (POINTER_TYPE_P (TREE_TYPE (elt))
                  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
                      == need_type))
                object = build1 (INDIRECT_REF, need_type, elt);

            if (object != 0)
              {
                /* Expand this object skipping the list entries before
                   it was found in case it is also a PLACEHOLDER_EXPR.
                   In that case, we want to translate it using subsequent
                   entries.  */
                placeholder_list = TREE_CHAIN (placeholder_expr);
                temp = expand_expr (object, original_target, tmode,
                                    ro_modifier);
                placeholder_list = old_list;
                return temp;
              }
          }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();
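      /* PLACEHOLDER_EXPR is used chiefly for self-referential types (for
         instance Ada records whose field positions depend on the object
         itself): the expression stands for "the object being referenced",
         and the WITH_RECORD_EXPR case below supplies that object by pushing
         it on placeholder_list while its operand is expanded.  */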
    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
                            tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
        expand_goto (TREE_OPERAND (exp, 0));
      else
        expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
                                 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;
    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
        expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
        sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;
    case BIND_EXPR:
      {
        tree vars = TREE_OPERAND (exp, 0);
        int vars_need_expansion = 0;

        /* Need to open a binding contour here because
           if there are any cleanups they must be contained here.  */
        expand_start_bindings (2);

        /* Mark the corresponding BLOCK for output in its proper place.  */
        if (TREE_OPERAND (exp, 2) != 0
            && ! TREE_USED (TREE_OPERAND (exp, 2)))
          insert_block (TREE_OPERAND (exp, 2));

        /* If VARS have not yet been expanded, expand them now.  */
        while (vars)
          {
            if (!DECL_RTL_SET_P (vars))
              {
                vars_need_expansion = 1;
                expand_decl (vars);
              }
            expand_decl_init (vars);
            vars = TREE_CHAIN (vars);
          }

        temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

        expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

        return temp;
      }
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
        {
          if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
            abort ();
          emit_insns (RTL_EXPR_SEQUENCE (exp));
          RTL_EXPR_SEQUENCE (exp) = const0_rtx;
        }
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);
          return const0_rtx;
        }

      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  Likewise, if we have a target we can use, it is best to
         store directly into the target unless the type is large enough
         that memcpy will be used.  If we are making an initializer and
         all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
                && ((mode == BLKmode
                     && ! (target != 0 && safe_from_p (target, exp, 1)))
                    || TREE_ADDRESSABLE (exp)
                    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
                        && (! MOVE_BY_PIECES_P
                            (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
                             TYPE_ALIGN (type)))
                        && ! mostly_zeros_p (exp))))
               || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
        {
          rtx constructor = output_constant_def (exp, 1);

          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM
              && (! memory_address_p (GET_MODE (constructor),
                                      XEXP (constructor, 0))
                  || (flag_force_addr
                      && GET_CODE (XEXP (constructor, 0)) != REG)))
            constructor = change_address (constructor, VOIDmode,
                                          XEXP (constructor, 0));
          return constructor;
        }
      else
        {
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (target == 0 || ! safe_from_p (target, exp, 1)
              || GET_CODE (target) == PARALLEL)
            target
              = assign_temp (build_qualified_type (type,
                                                   (TYPE_QUALS (type)
                                                    | (TREE_READONLY (exp)
                                                       * TYPE_QUAL_CONST))),
                             TREE_ADDRESSABLE (exp), 1, 1);

          store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
                             int_size_in_bytes (TREE_TYPE (exp)));
          return target;
        }
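      /* So a constant static aggregate such as
         `static int v[4] = {1, 2, 3, 4};' is emitted once as initialized
         data via output_constant_def, while a small non-constant or
         automatic aggregate is built up in place, one element at a time,
         by store_constructor.  */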
    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);
        tree index;
        tree string = string_constant (exp1, &index);

        /* Try to optimize reads from const strings.  */
        if (string
            && TREE_CODE (string) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1
            && modifier != EXPAND_MEMORY_USE_WO)
          return
            GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);

        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address (mode, op0);

        if (cfun && current_function_check_memory_usage
            && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
          {
            enum memory_use_mode memory_usage;
            memory_usage = get_memory_usage_from_modifier (modifier);

            if (memory_usage != MEMORY_USE_DONT)
              {
                in_check_memory_usage = 1;
                emit_library_call (chkr_check_addr_libfunc,
                                   LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
                                   Pmode, GEN_INT (int_size_in_bytes (type)),
                                   TYPE_MODE (sizetype),
                                   GEN_INT (memory_usage),
                                   TYPE_MODE (integer_type_node));
                in_check_memory_usage = 0;
              }
          }

        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, exp, 0);

        /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
           here, because, in C and C++, the fact that a location is accessed
           through a pointer to const does not mean that the value there can
           never change.  Languages where it can never change should
           also set TREE_STATIC.  */
        RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);

        /* If we are writing to this object and its type is a record with
           readonly fields, we must mark it as readonly so it will
           conflict with readonly references to those fields.  */
        if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
          RTX_UNCHANGING_P (temp) = 1;

        return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
        abort ();

      {
        tree array = TREE_OPERAND (exp, 0);
        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
        tree index = convert (sizetype, TREE_OPERAND (exp, 1));

        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion,  (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

        if (! integer_zerop (low_bound))
          index = size_diffop (index, convert (sizetype, low_bound));

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (TREE_CODE (array) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1)
          return
            GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
            && TREE_CODE (index) == INTEGER_CST
            && 0 > compare_tree_int (index,
                                     list_length (CONSTRUCTOR_ELTS
                                                  (TREE_OPERAND (exp, 0)))))
          {
            tree elem;
            HOST_WIDE_INT i;

            for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
                 i = TREE_INT_CST_LOW (index);
                 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
              ;

            if (elem)
              return expand_expr (fold (TREE_VALUE (elem)), target,
                                  tmode, ro_modifier);
          }

        else if (optimize >= 1
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
          {
            if (TREE_CODE (index) == INTEGER_CST)
              {
                tree init = DECL_INITIAL (array);

                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem;

                    for (elem = CONSTRUCTOR_ELTS (init);
                         (elem != 0
                          && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
                         elem = TREE_CHAIN (elem))
                      ;

                    if (elem)
                      return expand_expr (fold (TREE_VALUE (elem)), target,
                                          tmode, ro_modifier);
                  }
                else if (TREE_CODE (init) == STRING_CST
                         && 0 > compare_tree_int (index,
                                                  TREE_STRING_LENGTH (init)))
                  {
                    tree type = TREE_TYPE (TREE_TYPE (init));
                    enum machine_mode mode = TYPE_MODE (type);

                    if (GET_MODE_CLASS (mode) == MODE_INT
                        && GET_MODE_SIZE (mode) == 1)
                      return (GEN_INT
                              (TREE_STRING_POINTER
                               (init)[TREE_INT_CST_LOW (index)]));
                  }
              }
          }
      }
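      /* For instance, with `static const char msg[] = "foo";' the expression
         msg[2] is expanded straight to the constant 'o' rather than to a
         memory reference, and a constant index into a CONSTRUCTOR-initialized
         constant array is likewise replaced by the matching element's
         value.  */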
    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  Don't do this if we have
         already written the data since we want to refer to that copy
         and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
          && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
          && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
                    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
                         == MODE_INT)
                        && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
                if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
                  {
                    HOST_WIDE_INT bitsize
                      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));

                    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
                      {
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
                        op0 = expand_and (op0, op1, target);
                      }
                    else
                      {
                        enum machine_mode imode
                          = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
                        tree count
                          = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
                                         0);

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }

                return op0;
              }
        }
      {
        enum machine_mode mode1;
        HOST_WIDE_INT bitsize, bitpos;
        tree offset;
        int volatilep = 0;
        unsigned int alignment;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep,
                                        &alignment);

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        if (tem == exp)
          abort ();

        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to have to do.  This occurs in unchecked conversion in Ada.  */

        op0 = expand_expr (tem,
                           (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                            && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                                != INTEGER_CST)
                            ? target : NULL_RTX),
                           VOIDmode,
                           (modifier == EXPAND_INITIALIZER
                            || modifier == EXPAND_CONST_ADDRESS)
                           ? modifier : EXPAND_NORMAL);
        /* If this is a constant, put it into a register if it is a
           legitimate constant and OFFSET is 0 and memory if it isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
                && offset == 0)
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }

        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

            /* If this object is in memory, put it into a register.
               This case can't occur in C, but can in Ada if we have
               unchecked conversion of an expression from a scalar type to
               an array or record type.  */
            if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
                || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
              {
                tree nt = build_qualified_type (TREE_TYPE (tem),
                                                (TYPE_QUALS (TREE_TYPE (tem))
                                                 | TYPE_QUAL_CONST));
                rtx memloc = assign_temp (nt, 1, 1, 1);

                mark_temp_addr_taken (memloc);
                emit_move_insn (memloc, op0);
                op0 = memloc;
              }
            if (GET_CODE (op0) != MEM)
              abort ();

            if (GET_MODE (offset_rtx) != ptr_mode)
              {
#ifdef POINTERS_EXTEND_UNSIGNED
                offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
                offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
              }

            /* A constant address in OP0 can have VOIDmode, we must not try
               to call force_reg for that case.  Avoid that case.  */
            if (GET_CODE (op0) == MEM
                && GET_MODE (op0) == BLKmode
                && GET_MODE (XEXP (op0, 0)) != VOIDmode
                && bitsize != 0
                && (bitpos % bitsize) == 0
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
                && alignment == GET_MODE_ALIGNMENT (mode1))
              {
                rtx temp = change_address (op0, mode1,
                                           plus_constant (XEXP (op0, 0),
                                                          (bitpos
                                                           / BITS_PER_UNIT)));
                if (GET_CODE (XEXP (temp, 0)) == REG)
                  op0 = temp;
                else
                  op0 = change_address (op0, mode1,
                                        force_reg (GET_MODE (XEXP (temp, 0)),
                                                   XEXP (temp, 0)));
                bitpos = 0;
              }

            op0 = change_address (op0, VOIDmode,
                                  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
                                                force_reg (ptr_mode,
                                                           offset_rtx)));
          }
6956 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
6958 op0
= copy_rtx (op0
);
6959 MEM_VOLATILE_P (op0
) = 1;
6962 /* Check the access. */
6963 if (cfun
!= 0 && current_function_check_memory_usage
6964 && GET_CODE (op0
) == MEM
)
6966 enum memory_use_mode memory_usage
;
6967 memory_usage
= get_memory_usage_from_modifier (modifier
);
6969 if (memory_usage
!= MEMORY_USE_DONT
)
6974 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
6975 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
6977 /* Check the access right of the pointer. */
6978 in_check_memory_usage
= 1;
6979 if (size
> BITS_PER_UNIT
)
6980 emit_library_call (chkr_check_addr_libfunc
,
6981 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3, to
,
6982 Pmode
, GEN_INT (size
/ BITS_PER_UNIT
),
6983 TYPE_MODE (sizetype
),
6984 GEN_INT (memory_usage
),
6985 TYPE_MODE (integer_type_node
));
6986 in_check_memory_usage
= 0;
        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.
           If we ultimately want the address (EXPAND_CONST_ADDRESS or
           EXPAND_INITIALIZER), then we must not copy to a temporary.  */
        if (mode1 == VOIDmode
            || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
            || (modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER
                && ((mode1 != BLKmode && ! direct_load[(int) mode1]
                     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
                    /* If the field isn't aligned enough to fetch as a memref,
                       fetch it as a bit field.  */
                    || (mode1 != BLKmode
                        && SLOW_UNALIGNED_ACCESS (mode1, alignment)
                        && ((TYPE_ALIGN (TREE_TYPE (tem))
                             < GET_MODE_ALIGNMENT (mode))
                            || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
                    /* If the type and the field are a constant size and the
                       size of the type isn't the same size as the bitfield,
                       we must use bitfield operations.  */
                    || (bitsize >= 0
                        && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
                            == INTEGER_CST)
                        && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
                                                  bitsize))))
            || (modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER
                && mode == BLKmode
                && SLOW_UNALIGNED_ACCESS (mode, alignment)
                && (TYPE_ALIGN (type) > alignment
                    || bitpos % TYPE_ALIGN (type) != 0)))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode
                && ! (target != 0 && GET_CODE (op0) == MEM
                      && GET_CODE (target) == MEM
                      && bitpos % BITS_PER_UNIT == 0))
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                if (GET_CODE (op0) != MEM
                    || (target != 0 && GET_CODE (target) != MEM)
                    || bitpos % BITS_PER_UNIT != 0)
                  abort ();

                op0 = change_address (op0, VOIDmode,
                                      plus_constant (XEXP (op0, 0),
                                                     bitpos / BITS_PER_UNIT));
                if (target == 0)
                  target = assign_temp (type, 0, 1, 1);

                emit_block_move (target, op0,
                                 bitsize == -1 ? expr_size (exp)
                                 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                            / BITS_PER_UNIT),
                                 BITS_PER_UNIT);

                return target;
              }

            op0 = validize_mem (op0);

            if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
              mark_reg_pointer (XEXP (op0, 0), alignment);

            op0 = extract_bit_field (op0, bitsize, bitpos,
                                     unsignedp, target, ext_mode, ext_mode,
                                     alignment,
                                     int_size_in_bytes (TREE_TYPE (tem)));
            /* If the result is a record type and BITSIZE is narrower than
               the mode of OP0, an integral mode, and this is a big endian
               machine, we must put the field into the high-order bits.  */
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
                                            - bitsize),
                                  op0, 1);

            if (mode == BLKmode)
              {
                tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
                                                TYPE_QUAL_CONST);
                rtx new = assign_temp (nt, 0, 1, 1);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
              }

            return op0;
          }
        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          {
            rtx new = gen_rtx_MEM (mode1,
                                   plus_constant (XEXP (op0, 0),
                                                  (bitpos / BITS_PER_UNIT)));

            MEM_COPY_ATTRIBUTES (new, op0);
            op0 = new;
          }
        else
          op0 = change_address (op0, mode1,
                                plus_constant (XEXP (op0, 0),
                                               (bitpos / BITS_PER_UNIT)));

        set_mem_attributes (op0, exp, 0);
        if (GET_CODE (XEXP (op0, 0)) == REG)
          mark_reg_pointer (XEXP (op0, 0), alignment);

        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;
        else if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }
    case BUFFER_REF:
      /* Intended for a reference to a buffer of a file-object in Pascal.
         But it's not certain that a special tree code will really be
         necessary for these.  INDIRECT_REF might work for them.  */
      abort ();

    case IN_EXPR:
      {
        /* Pascal set IN expression.

           Algorithm:
               rlo       = set_low - (set_low % bits_per_word);
               the_word  = set [ (index - rlo) / bits_per_word ];
               bit_index = index % bits_per_word;
               bitmask   = 1 << bit_index;
               return !!(the_word & bitmask);  */

        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
        tree set_type = TREE_TYPE (set);
        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
        rtx index_val = expand_expr (index, 0, VOIDmode, 0);
        rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        rtx setval = expand_expr (set, 0, VOIDmode, 0);
        rtx setaddr = XEXP (setval, 0);
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;
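        /* Worked example of the algorithm above, with bits_per_word == 8 and
           set_low == 0: for index == 19, rlo == 0, the_word is set[19 / 8]
           == set[2], bit_index == 19 % 8 == 3, bitmask == 1 << 3, and the
           result is !!(set[2] & 0x08).  */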
        /* If domain is empty, answer is no.  Likewise if index is constant
           and out of bounds.  */
        if (((TREE_CODE (set_high_bound) == INTEGER_CST
              && TREE_CODE (set_low_bound) == INTEGER_CST
              && tree_int_cst_lt (set_high_bound, set_low_bound))
             || (TREE_CODE (index) == INTEGER_CST
                 && TREE_CODE (set_low_bound) == INTEGER_CST
                 && tree_int_cst_lt (index, set_low_bound))
             || (TREE_CODE (set_high_bound) == INTEGER_CST
                 && TREE_CODE (index) == INTEGER_CST
                 && tree_int_cst_lt (set_high_bound, index))))
          return const0_rtx;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        /* If we get here, we have to generate the code for both cases
           (in range and out of range).  */

        op0 = gen_label_rtx ();
        op1 = gen_label_rtx ();

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (lo_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, 0, op1);

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (hi_r) == CONST_INT))
          emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
                                   GET_MODE (index_val), iunsignedp, 0, op1);

        /* Calculate the element number of bit zero in the first word
           of the set.  */
        if (GET_CODE (lo_r) == CONST_INT)
          rlow = GEN_INT (INTVAL (lo_r)
                          & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
        else
          rlow = expand_binop (index_mode, and_optab, lo_r,
                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
                               NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        diff = expand_binop (index_mode, sub_optab, index_val, rlow,
                             NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab, diff,
                                             setaddr, NULL_RTX, iunsignedp,
                                             OPTAB_LIB_WIDEN));

        /* Extract the bit we want to examine.  */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx_MEM (byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }
    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
        {
          RTL_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
          expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));

          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 2) = 0;
        }
      return RTL_EXPR_RTL (exp);
    case CLEANUP_POINT_EXPR:
      {
        /* Start a new binding layer that will keep track of all cleanup
           actions to be performed.  */
        expand_start_bindings (2);

        target_temp_slot_level = temp_slot_level;

        op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
        /* If we're going to use this value, load it up now.  */
        if (! ignore)
          op0 = force_not_mem (op0);
        preserve_temp_slots (op0);
        expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        {
          if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == BUILT_IN_FRONTEND)
            return (*lang_expand_expr) (exp, original_target, tmode, modifier);
          else
            return expand_builtin (exp, target, subtarget, tmode, ignore);
        }

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

          /* If both input and output are BLKmode, this conversion
             isn't actually doing anything unless we need to make the
             alignment stricter.  */
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
              && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
                  || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
            return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
                                modifier);

          if (target == 0)
            target = assign_temp (type, 0, 1, 1);

          if (GET_CODE (target) == MEM)
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        change_address (target, TYPE_MODE (valtype), 0), 0);

          else if (GET_CODE (target) == REG)
            /* Store this field into a union of the proper type.  */
            store_field (target,
                         MIN ((int_size_in_bytes (TREE_TYPE
                                                  (TREE_OPERAND (exp, 0)))
                               * BITS_PER_UNIT),
                              (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
                         0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         VOIDmode, 0, BITS_PER_UNIT,
                         int_size_in_bytes (type), 0);
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             ro_modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return op0;
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
        return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
        return
          convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                         op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
        return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
        return
          convert_to_mode (mode, op0,
                           TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0,
                      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
         constant.  */
    plus_expr:
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? addv_optab : add_optab;
      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }
      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         index.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
        {
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              rtx constant_part;

              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
              op1 = plus_constant (op1, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              rtx constant_part;

              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 EXPAND_SUM);
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Don't go to both_summands if modifier
                     says it's not right to return a PLUS.  */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
              op0 = plus_constant (op0, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }
      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7483 if (GET_CODE (op0
) == PLUS
7484 && CONSTANT_P (XEXP (op0
, 1)))
7490 /* If adding to a sum including a constant,
7491 associate it to put the constant outside. */
7492 if (GET_CODE (op1
) == PLUS
7493 && CONSTANT_P (XEXP (op1
, 1)))
7495 rtx constant_term
= const0_rtx
;
7497 temp
= simplify_binary_operation (PLUS
, mode
, XEXP (op1
, 0), op0
);
7500 /* Ensure that MULT comes first if there is one. */
7501 else if (GET_CODE (op0
) == MULT
)
7502 op0
= gen_rtx_PLUS (mode
, op0
, XEXP (op1
, 0));
7504 op0
= gen_rtx_PLUS (mode
, XEXP (op1
, 0), op0
);
7506 /* Let's also eliminate constants from op0 if possible. */
7507 op0
= eliminate_constant_term (op0
, &constant_term
);
7509 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7510 their sum should be a constant. Form it into OP1, since the
7511 result we want will then be OP0 + OP1. */
7513 temp
= simplify_binary_operation (PLUS
, mode
, constant_term
,
7518 op1
= gen_rtx_PLUS (mode
, constant_term
, XEXP (op1
, 1));
7521 /* Put a constant term last and put a multiplication first. */
7522 if (CONSTANT_P (op0
) || GET_CODE (op1
) == MULT
)
7523 temp
= op1
, op1
= op0
, op0
= temp
;
7525 temp
= simplify_binary_operation (PLUS
, mode
, op0
, op1
);
7526 return temp
? temp
: gen_rtx_PLUS (mode
, op0
, op1
);
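      /* Example: if OP0 is (plus (reg) (const_int 8)) and OP1 is a
         SYMBOL_REF, the swap and reassociation above produce
         (plus (plus (reg) (symbol_ref)) (const_int 8)), i.e. the constant
         term ends up outermost where plus_constant and the addressing code
         can absorb it.  */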
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
                                 VOIDmode, ro_modifier);
          rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                 VOIDmode, ro_modifier);

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (GET_CODE (op1) == CONST_INT)
            return plus_constant (op0, - INTVAL (op1));
          else
            return gen_rtx_MINUS (mode, op0, op1);
        }
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        {
          tree negated = fold (build1 (NEGATE_EXPR, type,
                                       TREE_OPERAND (exp, 1)));

          if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
            /* If we can't negate the constant in TYPE, leave it alone and
               expand_binop will negate it for us.  We used to try to do it
               here in the signed version of TYPE, but that doesn't work
               on POINTER_TYPEs.  */;
          else
            {
              exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
              goto plus_expr;
            }
        }
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? subv_optab : sub_optab;
      goto binop;
    case MULT_EXPR:
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          register tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                             EXPAND_SUM);

          /* Apply distributive law if OP0 is x+c.  */
          if (GET_CODE (op0) == PLUS
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            return
              gen_rtx_PLUS
                (mode,
                 gen_rtx_MULT
                   (mode, XEXP (op0, 0),
                    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
                 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
                          * INTVAL (XEXP (op0, 1))));

          if (GET_CODE (op0) != REG)
            op0 = force_operand (op0, NULL_RTX);
          if (GET_CODE (op0) != REG)
            op0 = copy_to_mode_reg (mode, op0);

          return
            gen_rtx_MULT (mode, op0,
                          GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
        }
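      /* In the distributive-law case above, an OP0 of
         (plus (reg X) (const_int 4)) multiplied by 8 becomes
         (plus (mult (reg X) (const_int 8)) (const_int 32)), which is the
         shape an indexed address wants.  */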
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              ||
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
               && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
               /* If both operands are extended, they must either both
                  be zero-extended or both be sign-extended.  */
               && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
          optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                               ? smul_widen_optab : umul_widen_optab);
          this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                        ? umul_widen_optab : smul_widen_optab);
          if (mode == GET_MODE_WIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                       VOIDmode, 0);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  goto binop2;
                }
              else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
                       && innermode == word_mode)
                {
                  rtx htem;

                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = convert_modes (innermode, mode,
                                         expand_expr (TREE_OPERAND (exp, 1),
                                                      NULL_RTX, VOIDmode, 0),
                                         unsignedp);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  htem = expand_mult_highpart_adjust (innermode,
                                                      gen_highpart (innermode, temp),
                                                      op0, op1,
                                                      gen_highpart (innermode, temp),
                                                      unsignedp);
                  emit_move_insn (gen_highpart (innermode, temp), htem);
                  return temp;
                }
            }
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
         then if the divisor is constant can optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode,
                          ! unsignedp && flag_trapv
                          && (GET_MODE_CLASS (mode) == MODE_INT)
                          ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
        return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
          || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = (TREE_UNSIGNED (type)
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (GET_CODE (target) == MEM)
        target = gen_reg_rtx (mode);

      if (target != op0)
        emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        {
          if (code == MAX_EXPR)
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          target, op1, NULL_RTX, op0);
          else
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          op1, target, NULL_RTX, op0);
        }
      else
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
          do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
                                   unsignedp, mode, NULL_RTX, 0, NULL_RTX,
                                   op0);
        }
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7833 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
7839 op0
= expand_expr (TREE_OPERAND (exp
, 0), subtarget
, VOIDmode
, 0);
7840 temp
= expand_unop (mode
, ffs_optab
, op0
, target
, 1);
      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && GET_CODE (original_target) == REG
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          if (temp != original_target)
            temp = copy_to_reg (temp);

          op1 = gen_label_rtx ();
          emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
                                   GET_MODE (temp), unsignedp, 0, op1);
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }
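      /* The special case above handles `x != 0' when the result is wanted in
         a register of x's own mode: load x, and if it is nonzero overwrite
         the copy with 1, avoiding a general store-flag sequence.  */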
7923 /* If no set-flag instruction, must generate a conditional
7924 store into a temporary variable. Drop through
7925 and handle this like && and ||. */
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, ro_modifier);

    case COND_EXPR:
7967 /* If we would have a "singleton" (see below) were it not for a
7968 conversion in each arm, bring that conversion back out. */
7969 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == NOP_EXPR
7970 && TREE_CODE (TREE_OPERAND (exp
, 2)) == NOP_EXPR
7971 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0))
7972 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp
, 2), 0))))
7974 tree iftrue
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 0);
7975 tree iffalse
= TREE_OPERAND (TREE_OPERAND (exp
, 2), 0);
7977 if ((TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '2'
7978 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
7979 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '2'
7980 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0))
7981 || (TREE_CODE_CLASS (TREE_CODE (iftrue
)) == '1'
7982 && operand_equal_p (iffalse
, TREE_OPERAND (iftrue
, 0), 0))
7983 || (TREE_CODE_CLASS (TREE_CODE (iffalse
)) == '1'
7984 && operand_equal_p (iftrue
, TREE_OPERAND (iffalse
, 0), 0)))
7985 return expand_expr (build1 (NOP_EXPR
, type
,
7986 build (COND_EXPR
, TREE_TYPE (iftrue
),
7987 TREE_OPERAND (exp
, 0),
7989 target
, tmode
, modifier
);
7993 /* Note that COND_EXPRs whose type is a structure or union
7994 are required to be constructed to contain assignments of
7995 a temporary variable, so that we can evaluate them here
7996 for side effect only. If type is void, we must do likewise. */
7998 /* If an arm of the branch requires a cleanup,
7999 only that cleanup is performed. */
8002 tree binary_op
= 0, unary_op
= 0;
8004 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8005 convert it to our mode, if necessary. */
8006 if (integer_onep (TREE_OPERAND (exp
, 1))
8007 && integer_zerop (TREE_OPERAND (exp
, 2))
8008 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8012 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
8017 op0
= expand_expr (TREE_OPERAND (exp
, 0), target
, mode
, ro_modifier
);
8018 if (GET_MODE (op0
) == mode
)
8022 target
= gen_reg_rtx (mode
);
8023 convert_move (target
, op0
, unsignedp
);
8027 /* Check for X ? A + B : A. If we have this, we can copy A to the
8028 output and conditionally add B. Similarly for unary operations.
8029 Don't do this if X has side-effects because those side effects
8030 might affect A or B and the "?" operation is a sequence point in
8031 ANSI. (operand_equal_p tests for side effects.) */
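	 For example, given "x ? a + b : a" with no side effects in X, we can
	 copy A to the output and then conditionally add B, instead of
	 evaluating both arms and selecting one.  */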
8033 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '2'
8034 && operand_equal_p (TREE_OPERAND (exp
, 2),
8035 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8036 singleton
= TREE_OPERAND (exp
, 2), binary_op
= TREE_OPERAND (exp
, 1);
8037 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '2'
8038 && operand_equal_p (TREE_OPERAND (exp
, 1),
8039 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8040 singleton
= TREE_OPERAND (exp
, 1), binary_op
= TREE_OPERAND (exp
, 2);
8041 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 1))) == '1'
8042 && operand_equal_p (TREE_OPERAND (exp
, 2),
8043 TREE_OPERAND (TREE_OPERAND (exp
, 1), 0), 0))
8044 singleton
= TREE_OPERAND (exp
, 2), unary_op
= TREE_OPERAND (exp
, 1);
8045 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 2))) == '1'
8046 && operand_equal_p (TREE_OPERAND (exp
, 1),
8047 TREE_OPERAND (TREE_OPERAND (exp
, 2), 0), 0))
8048 singleton
= TREE_OPERAND (exp
, 1), unary_op
= TREE_OPERAND (exp
, 2);
8050 /* If we are not to produce a result, we have no target. Otherwise,
8051 if a target was specified use it; it will not be used as an
8052 intermediate target unless it is safe. If no target, use a
8057 else if (original_target
8058 && (safe_from_p (original_target
, TREE_OPERAND (exp
, 0), 1)
8059 || (singleton
&& GET_CODE (original_target
) == REG
8060 && REGNO (original_target
) >= FIRST_PSEUDO_REGISTER
8061 && original_target
== var_rtx (singleton
)))
8062 && GET_MODE (original_target
) == mode
8063 #ifdef HAVE_conditional_move
8064 && (! can_conditionally_move_p (mode
)
8065 || GET_CODE (original_target
) == REG
8066 || TREE_ADDRESSABLE (type
))
8068 && ! (GET_CODE (original_target
) == MEM
8069 && MEM_VOLATILE_P (original_target
)))
8070 temp
= original_target
;
8071 else if (TREE_ADDRESSABLE (type
))
8074 temp
= assign_temp (type
, 0, 0, 1);
8076 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8077 do the test of X as a store-flag operation, do this as
8078 A + ((X != 0) << log C). Similarly for other simple binary
8079 operators. Only do for C == 1 if BRANCH_COST is low. */
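      /* For instance, "x ? a + 4 : a" can become "a + ((x != 0) << 2)",
	 trading the conditional branch for a store-flag and a shift.  */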
8080 if (temp
&& singleton
&& binary_op
8081 && (TREE_CODE (binary_op
) == PLUS_EXPR
8082 || TREE_CODE (binary_op
) == MINUS_EXPR
8083 || TREE_CODE (binary_op
) == BIT_IOR_EXPR
8084 || TREE_CODE (binary_op
) == BIT_XOR_EXPR
)
8085 && (BRANCH_COST
>= 3 ? integer_pow2p (TREE_OPERAND (binary_op
, 1))
8086 : integer_onep (TREE_OPERAND (binary_op
, 1)))
8087 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<')
8090 optab boptab
= (TREE_CODE (binary_op
) == PLUS_EXPR
8091 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8092 ? addv_optab
: add_optab
)
8093 : TREE_CODE (binary_op
) == MINUS_EXPR
8094 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op
))
8095 ? subv_optab
: sub_optab
)
8096 : TREE_CODE (binary_op
) == BIT_IOR_EXPR
? ior_optab
8099 /* If we had X ? A : A + 1, do this as A + (X == 0).
8101 We have to invert the truth value here and then put it
8102 back later if do_store_flag fails. We cannot simply copy
8103 TREE_OPERAND (exp, 0) to another variable and modify that
8104 because invert_truthvalue can modify the tree pointed to
8106 if (singleton
== TREE_OPERAND (exp
, 1))
8107 TREE_OPERAND (exp
, 0)
8108 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8110 result
= do_store_flag (TREE_OPERAND (exp
, 0),
8111 (safe_from_p (temp
, singleton
, 1)
8113 mode
, BRANCH_COST
<= 1);
8115 if (result
!= 0 && ! integer_onep (TREE_OPERAND (binary_op
, 1)))
8116 result
= expand_shift (LSHIFT_EXPR
, mode
, result
,
8117 build_int_2 (tree_log2
8121 (safe_from_p (temp
, singleton
, 1)
8122 ? temp
: NULL_RTX
), 0);
8126 op1
= expand_expr (singleton
, NULL_RTX
, VOIDmode
, 0);
8127 return expand_binop (mode
, boptab
, op1
, result
, temp
,
8128 unsignedp
, OPTAB_LIB_WIDEN
);
8130 else if (singleton
== TREE_OPERAND (exp
, 1))
8131 TREE_OPERAND (exp
, 0)
8132 = invert_truthvalue (TREE_OPERAND (exp
, 0));
8135 do_pending_stack_adjust ();
8137 op0
= gen_label_rtx ();
8139 if (singleton
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0)))
8143 /* If the target conflicts with the other operand of the
8144 binary op, we can't use it. Also, we can't use the target
8145 if it is a hard register, because evaluating the condition
8146 might clobber it. */
8148 && ! safe_from_p (temp
, TREE_OPERAND (binary_op
, 1), 1))
8149 || (GET_CODE (temp
) == REG
8150 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
))
8151 temp
= gen_reg_rtx (mode
);
8152 store_expr (singleton
, temp
, 0);
8155 expand_expr (singleton
,
8156 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8157 if (singleton
== TREE_OPERAND (exp
, 1))
8158 jumpif (TREE_OPERAND (exp
, 0), op0
);
8160 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8162 start_cleanup_deferral ();
8163 if (binary_op
&& temp
== 0)
8164 /* Just touch the other operand. */
8165 expand_expr (TREE_OPERAND (binary_op
, 1),
8166 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8168 store_expr (build (TREE_CODE (binary_op
), type
,
8169 make_tree (type
, temp
),
8170 TREE_OPERAND (binary_op
, 1)),
8173 store_expr (build1 (TREE_CODE (unary_op
), type
,
8174 make_tree (type
, temp
)),
8178 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8179 comparison operator. If we have one of these cases, set the
8180 output to A, branch on A (cse will merge these two references),
8181 then set the output to FOO. */
8183 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8184 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8185 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8186 TREE_OPERAND (exp
, 1), 0)
8187 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8188 || TREE_CODE (TREE_OPERAND (exp
, 1)) == SAVE_EXPR
)
8189 && safe_from_p (temp
, TREE_OPERAND (exp
, 2), 1))
8191 if (GET_CODE (temp
) == REG
8192 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8193 temp
= gen_reg_rtx (mode
);
8194 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8195 jumpif (TREE_OPERAND (exp
, 0), op0
);
8197 start_cleanup_deferral ();
8198 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8202 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp
, 0))) == '<'
8203 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp
, 0), 1))
8204 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
8205 TREE_OPERAND (exp
, 2), 0)
8206 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp
, 0))
8207 || TREE_CODE (TREE_OPERAND (exp
, 2)) == SAVE_EXPR
)
8208 && safe_from_p (temp
, TREE_OPERAND (exp
, 1), 1))
8210 if (GET_CODE (temp
) == REG
8211 && REGNO (temp
) < FIRST_PSEUDO_REGISTER
)
8212 temp
= gen_reg_rtx (mode
);
8213 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8214 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8216 start_cleanup_deferral ();
8217 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8222 op1
= gen_label_rtx ();
8223 jumpifnot (TREE_OPERAND (exp
, 0), op0
);
8225 start_cleanup_deferral ();
8227 /* One branch of the cond can be void, if it never returns. For
8228 example A ? throw : E */
8230 && TREE_TYPE (TREE_OPERAND (exp
, 1)) != void_type_node
)
8231 store_expr (TREE_OPERAND (exp
, 1), temp
, 0);
8233 expand_expr (TREE_OPERAND (exp
, 1),
8234 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8235 end_cleanup_deferral ();
8237 emit_jump_insn (gen_jump (op1
));
8240 start_cleanup_deferral ();
8242 && TREE_TYPE (TREE_OPERAND (exp
, 2)) != void_type_node
)
8243 store_expr (TREE_OPERAND (exp
, 2), temp
, 0);
8245 expand_expr (TREE_OPERAND (exp
, 2),
8246 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
, 0);
8249 end_cleanup_deferral ();
8260 /* Something needs to be initialized, but we didn't know
8261 where that thing was when building the tree. For example,
8262 it could be the return value of a function, or a parameter
8263 to a function which lays down in the stack, or a temporary
8264 variable which must be passed by reference.
8266 We guarantee that the expression will either be constructed
8267 or copied into our original target. */
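	 A typical instance is a function returning an aggregate: the caller
	 supplies the slot to construct into, and the initializer below is
	 expanded directly into that slot (or into a temporary that is later
	 copied into it).  */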
8269 tree slot
= TREE_OPERAND (exp
, 0);
8270 tree cleanups
= NULL_TREE
;
8273 if (TREE_CODE (slot
) != VAR_DECL
)
8277 target
= original_target
;
8279 /* Set this here so that if we get a target that refers to a
8280 register variable that's already been used, put_reg_into_stack
8281 knows that it should fix up those uses. */
8282 TREE_USED (slot
) = 1;
8286 if (DECL_RTL_SET_P (slot
))
8288 target
= DECL_RTL (slot
);
8289 /* If we have already expanded the slot, so don't do
8291 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8296 target
= assign_temp (type
, 2, 0, 1);
8297 /* All temp slots at this level must not conflict. */
8298 preserve_temp_slots (target
);
8299 SET_DECL_RTL (slot
, target
);
8300 if (TREE_ADDRESSABLE (slot
))
8301 put_var_into_stack (slot
);
8303 /* Since SLOT is not known to the called function
8304 to belong to its stack frame, we must build an explicit
8305 cleanup. This case occurs when we must build up a reference
8306 to pass the reference as an argument. In this case,
8307 it is very likely that such a reference need not be
8310 if (TREE_OPERAND (exp
, 2) == 0)
8311 TREE_OPERAND (exp
, 2) = maybe_build_cleanup (slot
);
8312 cleanups
= TREE_OPERAND (exp
, 2);
8317 /* This case does occur, when expanding a parameter which
8318 needs to be constructed on the stack. The target
8319 is the actual stack address that we want to initialize.
8320 The function we call will perform the cleanup in this case. */
8322 /* If we have already assigned it space, use that space,
8323 not target that we were passed in, as our target
8324 parameter is only a hint. */
8325 if (DECL_RTL_SET_P (slot
))
8327 target
= DECL_RTL (slot
);
8328 /* If we have already expanded the slot, so don't do
8330 if (TREE_OPERAND (exp
, 1) == NULL_TREE
)
8335 SET_DECL_RTL (slot
, target
);
8336 /* If we must have an addressable slot, then make sure that
8337 the RTL that we just stored in slot is OK. */
8338 if (TREE_ADDRESSABLE (slot
))
8339 put_var_into_stack (slot
);
8343 exp1
= TREE_OPERAND (exp
, 3) = TREE_OPERAND (exp
, 1);
8344 /* Mark it as expanded. */
8345 TREE_OPERAND (exp
, 1) = NULL_TREE
;
8347 store_expr (exp1
, target
, 0);
8349 expand_decl_cleanup (NULL_TREE
, cleanups
);
8356 tree lhs
= TREE_OPERAND (exp
, 0);
8357 tree rhs
= TREE_OPERAND (exp
, 1);
8358 tree noncopied_parts
= 0;
8359 tree lhs_type
= TREE_TYPE (lhs
);
8361 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8362 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0 && !fixed_type_p (rhs
))
8363 noncopied_parts
= init_noncopied_parts (stabilize_reference (lhs
),
8364 TYPE_NONCOPIED_PARTS (lhs_type
));
8365 while (noncopied_parts
!= 0)
8367 expand_assignment (TREE_VALUE (noncopied_parts
),
8368 TREE_PURPOSE (noncopied_parts
), 0, 0);
8369 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
8376 /* If lhs is complex, expand calls in rhs before computing it.
8377 That's so we don't compute a pointer and save it over a call.
8378 If lhs is simple, compute it first so we can give it as a
8379 target if the rhs is just a call. This avoids an extra temp and copy
8380 and that prevents a partial-subsumption which makes bad code.
8381 Actually we could treat component_ref's of vars like vars. */
8383 tree lhs
= TREE_OPERAND (exp
, 0);
8384 tree rhs
= TREE_OPERAND (exp
, 1);
8385 tree noncopied_parts
= 0;
8386 tree lhs_type
= TREE_TYPE (lhs
);
8390 /* Check for |= or &= of a bitfield of size one into another bitfield
8391 of size 1. In this case, (unless we need the result of the
8392 assignment) we can do this more efficiently with a
8393 test followed by an assignment, if necessary.
8395 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8396 things change so we do, this code should be enhanced to
8399 && TREE_CODE (lhs
) == COMPONENT_REF
8400 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
8401 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
8402 && TREE_OPERAND (rhs
, 0) == lhs
8403 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
8404 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
8405 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
8407 rtx label
= gen_label_rtx ();
8409 do_jump (TREE_OPERAND (rhs
, 1),
8410 TREE_CODE (rhs
) == BIT_IOR_EXPR
? label
: 0,
8411 TREE_CODE (rhs
) == BIT_AND_EXPR
? label
: 0);
8412 expand_assignment (lhs
, convert (TREE_TYPE (rhs
),
8413 (TREE_CODE (rhs
) == BIT_IOR_EXPR
8415 : integer_zero_node
)),
8417 do_pending_stack_adjust ();
8422 if (TYPE_NONCOPIED_PARTS (lhs_type
) != 0
8423 && ! (fixed_type_p (lhs
) && fixed_type_p (rhs
)))
8424 noncopied_parts
= save_noncopied_parts (stabilize_reference (lhs
),
8425 TYPE_NONCOPIED_PARTS (lhs_type
));
8427 temp
= expand_assignment (lhs
, rhs
, ! ignore
, original_target
!= 0);
8428 while (noncopied_parts
!= 0)
8430 expand_assignment (TREE_PURPOSE (noncopied_parts
),
8431 TREE_VALUE (noncopied_parts
), 0, 0);
8432 noncopied_parts
= TREE_CHAIN (noncopied_parts
);
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
8454 /* If nonzero, TEMP will be set to the address of something that might
8455 be a MEM corresponding to a stack slot. */
8458 /* Are we taking the address of a nested function? */
8459 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
8460 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
8461 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
8462 && ! TREE_STATIC (exp
))
8464 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
8465 op0
= force_operand (op0
, target
);
8467 /* If we are taking the address of something erroneous, just
8469 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
8473 /* We make sure to pass const0_rtx down if we came in with
8474 ignore set, to avoid doing the cleanups twice for something. */
8475 op0
= expand_expr (TREE_OPERAND (exp
, 0),
8476 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
8477 (modifier
== EXPAND_INITIALIZER
8478 ? modifier
: EXPAND_CONST_ADDRESS
));
8480 /* If we are going to ignore the result, OP0 will have been set
8481 to const0_rtx, so just return it. Don't get confused and
8482 think we are taking the address of the constant. */
8486 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8487 clever and returns a REG when given a MEM. */
8488 op0
= protect_from_queue (op0
, 1);
8490 /* We would like the object in memory. If it is a constant, we can
8491 have it be statically allocated into memory. For a non-constant,
8492 we need to allocate some memory and store the value into it. */
8494 if (CONSTANT_P (op0
))
8495 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
8497 else if (GET_CODE (op0
) == MEM
)
8499 mark_temp_addr_taken (op0
);
8500 temp
= XEXP (op0
, 0);
8503 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8504 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
8505 || GET_CODE (op0
) == PARALLEL
)
8507 /* If this object is in a register, it must be not
8509 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
8510 tree nt
= build_qualified_type (inner_type
,
8511 (TYPE_QUALS (inner_type
)
8512 | TYPE_QUAL_CONST
));
8513 rtx memloc
= assign_temp (nt
, 1, 1, 1);
8515 mark_temp_addr_taken (memloc
);
8516 if (GET_CODE (op0
) == PARALLEL
)
8517 /* Handle calls that pass values in multiple non-contiguous
8518 locations. The Irix 6 ABI has examples of this. */
8519 emit_group_store (memloc
, op0
,
8520 int_size_in_bytes (inner_type
),
8521 TYPE_ALIGN (inner_type
));
8523 emit_move_insn (memloc
, op0
);
8527 if (GET_CODE (op0
) != MEM
)
8530 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8532 temp
= XEXP (op0
, 0);
8533 #ifdef POINTERS_EXTEND_UNSIGNED
8534 if (GET_MODE (temp
) == Pmode
&& GET_MODE (temp
) != mode
8535 && mode
== ptr_mode
)
8536 temp
= convert_memory_address (ptr_mode
, temp
);
8541 op0
= force_operand (XEXP (op0
, 0), target
);
8544 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
8545 op0
= force_reg (Pmode
, op0
);
8547 if (GET_CODE (op0
) == REG
8548 && ! REG_USERVAR_P (op0
))
8549 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)));
8551 /* If we might have had a temp slot, add an equivalent address
8554 update_temp_slot_address (temp
, op0
);
8556 #ifdef POINTERS_EXTEND_UNSIGNED
8557 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
8558 && mode
== ptr_mode
)
8559 op0
= convert_memory_address (ptr_mode
, op0
);
    case ENTRY_VALUE_EXPR:
      abort ();

      /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode,
			    ! unsignedp && flag_trapv
			    && (GET_MODE_CLASS (partmode) == MODE_INT)
			    ? negv_optab : neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }
    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	expand_eh_region_end_cleanup (handler);

	return op0;
      }

    case TRY_FINALLY_EXPR:
      {
	tree try_block = TREE_OPERAND (exp, 0);
	tree finally_block = TREE_OPERAND (exp, 1);
	rtx finally_label = gen_label_rtx ();
	rtx done_label = gen_label_rtx ();
	rtx return_link = gen_reg_rtx (Pmode);
	tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
			      (tree) finally_label, (tree) return_link);
	TREE_SIDE_EFFECTS (cleanup) = 1;

	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	expand_decl_cleanup (NULL_TREE, cleanup);
	op0 = expand_expr (try_block, target, tmode, modifier);

	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
	emit_jump (done_label);
	emit_label (finally_label);
	expand_expr (finally_block, const0_rtx, VOIDmode, 0);
	emit_indirect_jump (return_link);
	emit_label (done_label);

	return op0;
      }

    case GOTO_SUBROUTINE_EXPR:
      {
	rtx subr = (rtx) TREE_OPERAND (exp, 0);
	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
	rtx return_address = gen_label_rtx ();

	emit_move_insn (return_link,
			gen_rtx_LABEL_REF (Pmode, return_address));
	emit_jump (subr);
	emit_label (return_address);

	return const0_rtx;
      }

    case VA_ARG_EXPR:
      return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);

    case EXC_PTR_EXPR:
      return get_exception_pointer ();

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }
  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Similar to expand_expr, except that we don't specify a target, target
   mode, or modifier and we return the alignment of the inner type.  This is
   used in cases where it is not necessary to align the result to the
   alignment of its type as long as we know the alignment of the result, for
   example for comparisons of BLKmode values.  */

static rtx
expand_expr_unaligned (exp, palign)
     register tree exp;
     unsigned int *palign;
{
  register rtx op0;
  tree type = TREE_TYPE (exp);
  register enum machine_mode mode = TYPE_MODE (type);

  /* Default the alignment we return to that of the type.  */
  *palign = TYPE_ALIGN (type);

  /* The only case in which we do anything special is when the resulting
     mode is BLKmode.  */
  if (mode != BLKmode)
    return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between BLKmode values don't change the underlying
	 alignment or value.  */
      if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
	return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
      break;

    case ARRAY_REF:
8765 /* Much of the code for this case is copied directly from expand_expr.
8766 We need to duplicate it here because we will do something different
8767 in the fall-through case, so we need to handle the same exceptions
8770 tree array
= TREE_OPERAND (exp
, 0);
8771 tree domain
= TYPE_DOMAIN (TREE_TYPE (array
));
8772 tree low_bound
= domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
8773 tree index
= convert (sizetype
, TREE_OPERAND (exp
, 1));
8776 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) != ARRAY_TYPE
)
8779 /* Optimize the special-case of a zero lower bound.
8781 We convert the low_bound to sizetype to avoid some problems
8782 with constant folding. (E.g. suppose the lower bound is 1,
8783 and its mode is QI. Without the conversion, (ARRAY
8784 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8785 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8787 if (! integer_zerop (low_bound
))
8788 index
= size_diffop (index
, convert (sizetype
, low_bound
));
8790 /* If this is a constant index into a constant array,
8791 just get the value from the array. Handle both the cases when
8792 we have an explicit constructor and when our operand is a variable
8793 that was declared const. */
8795 if (TREE_CODE (array
) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array
)
8796 && host_integerp (index
, 0)
8797 && 0 > compare_tree_int (index
,
8798 list_length (CONSTRUCTOR_ELTS
8799 (TREE_OPERAND (exp
, 0)))))
8803 for (elem
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)),
8804 i
= tree_low_cst (index
, 0);
8805 elem
!= 0 && i
!= 0; i
--, elem
= TREE_CHAIN (elem
))
8809 return expand_expr_unaligned (fold (TREE_VALUE (elem
)), palign
);
8812 else if (optimize
>= 1
8813 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
8814 && TREE_CODE (array
) == VAR_DECL
&& DECL_INITIAL (array
)
8815 && TREE_CODE (DECL_INITIAL (array
)) != ERROR_MARK
)
8817 if (TREE_CODE (index
) == INTEGER_CST
)
8819 tree init
= DECL_INITIAL (array
);
8821 if (TREE_CODE (init
) == CONSTRUCTOR
)
8825 for (elem
= CONSTRUCTOR_ELTS (init
);
8826 ! tree_int_cst_equal (TREE_PURPOSE (elem
), index
);
8827 elem
= TREE_CHAIN (elem
))
8831 return expand_expr_unaligned (fold (TREE_VALUE (elem
)),
8841 /* If the operand is a CONSTRUCTOR, we can just extract the
8842 appropriate field if it is present. Don't do this if we have
8843 already written the data since we want to refer to that copy
8844 and varasm.c assumes that's what we'll do. */
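	 That is, for a COMPONENT_REF of a CONSTRUCTOR we simply return the
	 element whose TREE_PURPOSE matches the field being referenced,
	 rather than laying the constructor out in memory first.  */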
8845 if (TREE_CODE (exp
) != ARRAY_REF
8846 && TREE_CODE (TREE_OPERAND (exp
, 0)) == CONSTRUCTOR
8847 && TREE_CST_RTL (TREE_OPERAND (exp
, 0)) == 0)
8851 for (elt
= CONSTRUCTOR_ELTS (TREE_OPERAND (exp
, 0)); elt
;
8852 elt
= TREE_CHAIN (elt
))
8853 if (TREE_PURPOSE (elt
) == TREE_OPERAND (exp
, 1))
8854 /* Note that unlike the case in expand_expr, we know this is
8855 BLKmode and hence not an integer. */
8856 return expand_expr_unaligned (TREE_VALUE (elt
), palign
);
8860 enum machine_mode mode1
;
8861 HOST_WIDE_INT bitsize
, bitpos
;
8864 unsigned int alignment
;
8866 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
8867 &mode1
, &unsignedp
, &volatilep
,
8870 /* If we got back the original object, something is wrong. Perhaps
8871 we are evaluating an expression too early. In any event, don't
8872 infinitely recurse. */
8876 op0
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
8878 /* If this is a constant, put it into a register if it is a
8879 legitimate constant and OFFSET is 0 and memory if it isn't. */
8880 if (CONSTANT_P (op0
))
8882 enum machine_mode inner_mode
= TYPE_MODE (TREE_TYPE (tem
));
8884 if (inner_mode
!= BLKmode
&& LEGITIMATE_CONSTANT_P (op0
)
8886 op0
= force_reg (inner_mode
, op0
);
8888 op0
= validize_mem (force_const_mem (inner_mode
, op0
));
8893 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
8895 /* If this object is in a register, put it into memory.
8896 This case can't occur in C, but can in Ada if we have
8897 unchecked conversion of an expression from a scalar type to
8898 an array or record type. */
8899 if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8900 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
8902 tree nt
= build_qualified_type (TREE_TYPE (tem
),
8903 (TYPE_QUALS (TREE_TYPE (tem
))
8904 | TYPE_QUAL_CONST
));
8905 rtx memloc
= assign_temp (nt
, 1, 1, 1);
8907 mark_temp_addr_taken (memloc
);
8908 emit_move_insn (memloc
, op0
);
8912 if (GET_CODE (op0
) != MEM
)
8915 if (GET_MODE (offset_rtx
) != ptr_mode
)
8917 #ifdef POINTERS_EXTEND_UNSIGNED
8918 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
8920 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
8924 op0
= change_address (op0
, VOIDmode
,
8925 gen_rtx_PLUS (ptr_mode
, XEXP (op0
, 0),
8926 force_reg (ptr_mode
,
8930 /* Don't forget about volatility even if this is a bitfield. */
8931 if (GET_CODE (op0
) == MEM
&& volatilep
&& ! MEM_VOLATILE_P (op0
))
8933 op0
= copy_rtx (op0
);
8934 MEM_VOLATILE_P (op0
) = 1;
8937 /* Check the access. */
8938 if (current_function_check_memory_usage
&& GET_CODE (op0
) == MEM
)
8943 to
= plus_constant (XEXP (op0
, 0), (bitpos
/ BITS_PER_UNIT
));
8944 size
= (bitpos
% BITS_PER_UNIT
) + bitsize
+ BITS_PER_UNIT
- 1;
8946 /* Check the access right of the pointer. */
8947 in_check_memory_usage
= 1;
8948 if (size
> BITS_PER_UNIT
)
8949 emit_library_call (chkr_check_addr_libfunc
,
8950 LCT_CONST_MAKE_BLOCK
, VOIDmode
, 3,
8951 to
, ptr_mode
, GEN_INT (size
/ BITS_PER_UNIT
),
8952 TYPE_MODE (sizetype
),
8953 GEN_INT (MEMORY_USE_RO
),
8954 TYPE_MODE (integer_type_node
));
8955 in_check_memory_usage
= 0;
8958 /* In cases where an aligned union has an unaligned object
8959 as a field, we might be extracting a BLKmode value from
8960 an integer-mode (e.g., SImode) object. Handle this case
8961 by doing the extract into an object as wide as the field
8962 (which we know to be the width of a basic mode), then
8963 storing into memory, and changing the mode to BLKmode.
8964 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8965 EXPAND_INITIALIZER), then we must not copy to a temporary. */
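	 Concretely: extract the field into a register of EXT_MODE with
	 extract_bit_field, store that register into a stack temporary, and
	 hand back the temporary with its mode changed to BLKmode.  */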
8966 if (mode1
== VOIDmode
8967 || GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
8968 || (SLOW_UNALIGNED_ACCESS (mode1
, alignment
)
8969 && (TYPE_ALIGN (type
) > alignment
8970 || bitpos
% TYPE_ALIGN (type
) != 0)))
8972 enum machine_mode ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
8974 if (ext_mode
== BLKmode
)
8976 /* In this case, BITPOS must start at a byte boundary. */
8977 if (GET_CODE (op0
) != MEM
8978 || bitpos
% BITS_PER_UNIT
!= 0)
8981 op0
= change_address (op0
, VOIDmode
,
8982 plus_constant (XEXP (op0
, 0),
8983 bitpos
/ BITS_PER_UNIT
));
8987 tree nt
= build_qualified_type (type_for_mode (ext_mode
, 0),
8989 rtx
new = assign_temp (nt
, 0, 1, 1);
8991 op0
= extract_bit_field (validize_mem (op0
), bitsize
, bitpos
,
8992 unsignedp
, NULL_RTX
, ext_mode
,
8993 ext_mode
, alignment
,
8994 int_size_in_bytes (TREE_TYPE (tem
)));
8996 /* If the result is a record type and BITSIZE is narrower than
8997 the mode of OP0, an integral mode, and this is a big endian
8998 machine, we must put the field into the high-order bits. */
8999 if (TREE_CODE (type
) == RECORD_TYPE
&& BYTES_BIG_ENDIAN
9000 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
9001 && bitsize
< GET_MODE_BITSIZE (GET_MODE (op0
)))
9002 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
9003 size_int (GET_MODE_BITSIZE
9008 emit_move_insn (new, op0
);
9009 op0
= copy_rtx (new);
9010 PUT_MODE (op0
, BLKmode
);
9014 /* Get a reference to just this component. */
9015 op0
= change_address (op0
, mode1
,
9016 plus_constant (XEXP (op0
, 0),
9017 (bitpos
/ BITS_PER_UNIT
)));
9019 MEM_ALIAS_SET (op0
) = get_alias_set (exp
);
9021 /* Adjust the alignment in case the bit position is not
9022 a multiple of the alignment of the inner object. */
9023 while (bitpos
% alignment
!= 0)
9026 if (GET_CODE (XEXP (op0
, 0)) == REG
)
9027 mark_reg_pointer (XEXP (op0
, 0), alignment
);
9029 MEM_IN_STRUCT_P (op0
) = 1;
9030 MEM_VOLATILE_P (op0
) |= volatilep
;
9032 *palign
= alignment
;
9041 return expand_expr (exp
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return non-zero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
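/* For example, given the tree for ("abcdef" + 3), i.e. a PLUS_EXPR of an
   ADDR_EXPR of a STRING_CST and a constant, this returns the STRING_CST
   and sets *PTR_OFFSET to the sizetype constant 3.  */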
9087 /* Expand code for a post- or pre- increment or decrement
9088 and return the RTX for the result.
9089 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
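/* For a postincrement or postdecrement the rtx returned is the value the
   operand had before the operation; for a preincrement or predecrement it
   is the updated value.  */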
9092 expand_increment (exp
, post
, ignore
)
9096 register rtx op0
, op1
;
9097 register rtx temp
, value
;
9098 register tree incremented
= TREE_OPERAND (exp
, 0);
9099 optab this_optab
= add_optab
;
9101 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
9102 int op0_is_copy
= 0;
9103 int single_insn
= 0;
9104 /* 1 means we can't store into OP0 directly,
9105 because it is a subreg narrower than a word,
9106 and we don't dare clobber the rest of the word. */
9109 /* Stabilize any component ref that might need to be
9110 evaluated more than once below. */
9112 || TREE_CODE (incremented
) == BIT_FIELD_REF
9113 || (TREE_CODE (incremented
) == COMPONENT_REF
9114 && (TREE_CODE (TREE_OPERAND (incremented
, 0)) != INDIRECT_REF
9115 || DECL_BIT_FIELD (TREE_OPERAND (incremented
, 1)))))
9116 incremented
= stabilize_reference (incremented
);
9117 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9118 ones into save exprs so that they don't accidentally get evaluated
9119 more than once by the code below. */
9120 if (TREE_CODE (incremented
) == PREINCREMENT_EXPR
9121 || TREE_CODE (incremented
) == PREDECREMENT_EXPR
)
9122 incremented
= save_expr (incremented
);
9124 /* Compute the operands as RTX.
9125 Note whether OP0 is the actual lvalue or a copy of it:
9126 I believe it is a copy iff it is a register or subreg
9127 and insns were generated in computing it. */
9129 temp
= get_last_insn ();
9130 op0
= expand_expr (incremented
, NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_RW
);
9132 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9133 in place but instead must do sign- or zero-extension during assignment,
9134 so we copy it into a new register and let the code below use it as
9137 Note that we can safely modify this SUBREG since it is know not to be
9138 shared (it was made by the expand_expr call above). */
9140 if (GET_CODE (op0
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (op0
))
9143 SUBREG_REG (op0
) = copy_to_reg (SUBREG_REG (op0
));
9147 else if (GET_CODE (op0
) == SUBREG
9148 && GET_MODE_BITSIZE (GET_MODE (op0
)) < BITS_PER_WORD
)
9150 /* We cannot increment this SUBREG in place. If we are
9151 post-incrementing, get a copy of the old value. Otherwise,
9152 just mark that we cannot increment in place. */
9154 op0
= copy_to_reg (op0
);
9159 op0_is_copy
= ((GET_CODE (op0
) == SUBREG
|| GET_CODE (op0
) == REG
)
9160 && temp
!= get_last_insn ());
9161 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
,
9162 EXPAND_MEMORY_USE_BAD
);
9164 /* Decide whether incrementing or decrementing. */
9165 if (TREE_CODE (exp
) == POSTDECREMENT_EXPR
9166 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9167 this_optab
= sub_optab
;
9169 /* Convert decrement by a constant into a negative increment. */
9170 if (this_optab
== sub_optab
9171 && GET_CODE (op1
) == CONST_INT
)
9173 op1
= GEN_INT (-INTVAL (op1
));
9174 this_optab
= add_optab
;
9177 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp
)))
9178 this_optab
= this_optab
== add_optab
? addv_optab
: subv_optab
;
9180 /* For a preincrement, see if we can do this with a single instruction. */
9183 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9184 if (icode
!= (int) CODE_FOR_nothing
9185 /* Make sure that OP0 is valid for operands 0 and 1
9186 of the insn we want to queue. */
9187 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9188 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
)
9189 && (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9193 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9194 then we cannot just increment OP0. We must therefore contrive to
9195 increment the original value. Then, for postincrement, we can return
9196 OP0 since it is a copy of the old value. For preincrement, expand here
9197 unless we can do it with a single insn.
9199 Likewise if storing directly into OP0 would clobber high bits
9200 we need to preserve (bad_subreg). */
9201 if (op0_is_copy
|| (!post
&& !single_insn
) || bad_subreg
)
9203 /* This is the easiest way to increment the value wherever it is.
9204 Problems with multiple evaluation of INCREMENTED are prevented
9205 because either (1) it is a component_ref or preincrement,
9206 in which case it was stabilized above, or (2) it is an array_ref
9207 with constant index in an array in a register, which is
9208 safe to reevaluate. */
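	 Concretely, the increment is rewritten below as the equivalent
	 assignment "incremented = incremented +/- operand" and expanded
	 through expand_assignment.  */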
9209 tree newexp
= build (((TREE_CODE (exp
) == POSTDECREMENT_EXPR
9210 || TREE_CODE (exp
) == PREDECREMENT_EXPR
)
9211 ? MINUS_EXPR
: PLUS_EXPR
),
9214 TREE_OPERAND (exp
, 1));
9216 while (TREE_CODE (incremented
) == NOP_EXPR
9217 || TREE_CODE (incremented
) == CONVERT_EXPR
)
9219 newexp
= convert (TREE_TYPE (incremented
), newexp
);
9220 incremented
= TREE_OPERAND (incremented
, 0);
9223 temp
= expand_assignment (incremented
, newexp
, ! post
&& ! ignore
, 0);
9224 return post
? op0
: temp
;
9229 /* We have a true reference to the value in OP0.
9230 If there is an insn to add or subtract in this mode, queue it.
9231 Queueing the increment insn avoids the register shuffling
9232 that often results if we must increment now and first save
9233 the old value for subsequent use. */
9235 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9236 op0
= stabilize (op0
);
9239 icode
= (int) this_optab
->handlers
[(int) mode
].insn_code
;
9240 if (icode
!= (int) CODE_FOR_nothing
9241 /* Make sure that OP0 is valid for operands 0 and 1
9242 of the insn we want to queue. */
9243 && (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode
)
9244 && (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode
))
9246 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9247 op1
= force_reg (mode
, op1
);
9249 return enqueue_insn (op0
, GEN_FCN (icode
) (op0
, op0
, op1
));
9251 if (icode
!= (int) CODE_FOR_nothing
&& GET_CODE (op0
) == MEM
)
9253 rtx addr
= (general_operand (XEXP (op0
, 0), mode
)
9254 ? force_reg (Pmode
, XEXP (op0
, 0))
9255 : copy_to_reg (XEXP (op0
, 0)));
9258 op0
= change_address (op0
, VOIDmode
, addr
);
9259 temp
= force_reg (GET_MODE (op0
), op0
);
9260 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode
))
9261 op1
= force_reg (mode
, op1
);
9263 /* The increment queue is LIFO, thus we have to `queue'
9264 the instructions in reverse order. */
9265 enqueue_insn (op0
, gen_move_insn (op0
, temp
));
9266 result
= enqueue_insn (temp
, GEN_FCN (icode
) (temp
, temp
, op1
));
9271 /* Preincrement, or we can't increment with one simple insn. */
9273 /* Save a copy of the value before inc or dec, to return it later. */
9274 temp
= value
= copy_to_reg (op0
);
9276 /* Arrange to return the incremented value. */
9277 /* Copy the rtx because expand_binop will protect from the queue,
9278 and the results of that would be invalid for us to return
9279 if our caller does emit_queue before using our result. */
9280 temp
= copy_rtx (value
= op0
);
9282 /* Increment however we can. */
9283 op1
= expand_binop (mode
, this_optab
, value
, op1
,
9284 current_function_check_memory_usage
? NULL_RTX
: op0
,
9285 TREE_UNSIGNED (TREE_TYPE (exp
)), OPTAB_LIB_WIDEN
);
9286 /* Make sure the value is stored into OP0. */
9288 emit_move_insn (op0
, op1
);
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */
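/* For instance, for "a && b" the TRUTH_ANDIF_EXPR case below jumps to the
   false label as soon as A is known to be zero and only then evaluates B;
   "||" is handled symmetrically with the true label.  */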
9374 do_jump (exp
, if_false_label
, if_true_label
)
9376 rtx if_false_label
, if_true_label
;
9378 register enum tree_code code
= TREE_CODE (exp
);
9379 /* Some cases need to create a label to jump to
9380 in order to properly fall through.
9381 These cases set DROP_THROUGH_LABEL nonzero. */
9382 rtx drop_through_label
= 0;
9386 enum machine_mode mode
;
9388 #ifdef MAX_INTEGER_COMPUTATION_MODE
9389 check_max_integer_computation_mode (exp
);
9400 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
9406 /* This is not true with #pragma weak */
9408 /* The address of something can never be zero. */
9410 emit_jump (if_true_label
);
9415 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
9416 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
9417 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
)
9420 /* If we are narrowing the operand, we have to do the compare in the
9422 if ((TYPE_PRECISION (TREE_TYPE (exp
))
9423 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
9425 case NON_LVALUE_EXPR
:
9426 case REFERENCE_EXPR
:
9431 /* These cannot change zero->non-zero or vice versa. */
9432 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9435 case WITH_RECORD_EXPR
:
9436 /* Put the object on the placeholder list, recurse through our first
9437 operand, and pop the list. */
9438 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
9440 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9441 placeholder_list
= TREE_CHAIN (placeholder_list
);
9445 /* This is never less insns than evaluating the PLUS_EXPR followed by
9446 a test and can be longer if the test is eliminated. */
9448 /* Reduce to minus. */
9449 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
9450 TREE_OPERAND (exp
, 0),
9451 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
9452 TREE_OPERAND (exp
, 1))));
9453 /* Process as MINUS. */
9457 /* Non-zero iff operands of minus differ. */
9458 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
9459 TREE_OPERAND (exp
, 0),
9460 TREE_OPERAND (exp
, 1)),
9461 NE
, NE
, if_false_label
, if_true_label
);
9465 /* If we are AND'ing with a small constant, do this comparison in the
9466 smallest type that fits. If the machine doesn't have comparisons
9467 that small, it will be converted back to the wider comparison.
9468 This helps if we are testing the sign bit of a narrower object.
9469 combine can't do this for us because it can't know whether a
9470 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
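      /* For example, "(x & 0x80) != 0" with X an int can be tested as a
	 QImode comparison when the target has a QImode compare, instead of
	 doing the test in the full SImode width.  */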
9472 if (! SLOW_BYTE_ACCESS
9473 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
9474 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
9475 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
9476 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
9477 && (type
= type_for_mode (mode
, 1)) != 0
9478 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9479 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9480 != CODE_FOR_nothing
))
9482 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9487 case TRUTH_NOT_EXPR
:
9488 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9491 case TRUTH_ANDIF_EXPR
:
9492 if (if_false_label
== 0)
9493 if_false_label
= drop_through_label
= gen_label_rtx ();
9494 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
9495 start_cleanup_deferral ();
9496 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9497 end_cleanup_deferral ();
9500 case TRUTH_ORIF_EXPR
:
9501 if (if_true_label
== 0)
9502 if_true_label
= drop_through_label
= gen_label_rtx ();
9503 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
9504 start_cleanup_deferral ();
9505 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9506 end_cleanup_deferral ();
9511 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
9512 preserve_temp_slots (NULL_RTX
);
9516 do_pending_stack_adjust ();
9517 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
9524 HOST_WIDE_INT bitsize
, bitpos
;
9526 enum machine_mode mode
;
9530 unsigned int alignment
;
9532 /* Get description of this reference. We don't actually care
9533 about the underlying object here. */
9534 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
9535 &unsignedp
, &volatilep
, &alignment
);
9537 type
= type_for_size (bitsize
, unsignedp
);
9538 if (! SLOW_BYTE_ACCESS
9539 && type
!= 0 && bitsize
>= 0
9540 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
9541 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
9542 != CODE_FOR_nothing
))
9544 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
9551 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9552 if (integer_onep (TREE_OPERAND (exp
, 1))
9553 && integer_zerop (TREE_OPERAND (exp
, 2)))
9554 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9556 else if (integer_zerop (TREE_OPERAND (exp
, 1))
9557 && integer_onep (TREE_OPERAND (exp
, 2)))
9558 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9562 register rtx label1
= gen_label_rtx ();
9563 drop_through_label
= gen_label_rtx ();
9565 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
9567 start_cleanup_deferral ();
9568 /* Now the THEN-expression. */
9569 do_jump (TREE_OPERAND (exp
, 1),
9570 if_false_label
? if_false_label
: drop_through_label
,
9571 if_true_label
? if_true_label
: drop_through_label
);
9572 /* In case the do_jump just above never jumps. */
9573 do_pending_stack_adjust ();
9574 emit_label (label1
);
9576 /* Now the ELSE-expression. */
9577 do_jump (TREE_OPERAND (exp
, 2),
9578 if_false_label
? if_false_label
: drop_through_label
,
9579 if_true_label
? if_true_label
: drop_through_label
);
9580 end_cleanup_deferral ();
9586 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9588 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9589 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9591 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9592 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9595 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
9596 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9597 fold (build1 (REALPART_EXPR
,
9598 TREE_TYPE (inner_type
),
9600 fold (build1 (REALPART_EXPR
,
9601 TREE_TYPE (inner_type
),
9603 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
9604 fold (build1 (IMAGPART_EXPR
,
9605 TREE_TYPE (inner_type
),
9607 fold (build1 (IMAGPART_EXPR
,
9608 TREE_TYPE (inner_type
),
9610 if_false_label
, if_true_label
);
9613 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9614 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
9616 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9617 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
9618 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
9620 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
9626 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
9628 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
9629 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
9631 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
9632 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
9635 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
9636 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9637 fold (build1 (REALPART_EXPR
,
9638 TREE_TYPE (inner_type
),
9640 fold (build1 (REALPART_EXPR
,
9641 TREE_TYPE (inner_type
),
9643 fold (build (NE_EXPR
, TREE_TYPE (exp
),
9644 fold (build1 (IMAGPART_EXPR
,
9645 TREE_TYPE (inner_type
),
9647 fold (build1 (IMAGPART_EXPR
,
9648 TREE_TYPE (inner_type
),
9650 if_false_label
, if_true_label
);
9653 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
9654 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
9656 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
9657 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
9658 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
9660 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
9665 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9666 if (GET_MODE_CLASS (mode
) == MODE_INT
9667 && ! can_compare_p (LT
, mode
, ccp_jump
))
9668 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
9670 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
9674 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9675 if (GET_MODE_CLASS (mode
) == MODE_INT
9676 && ! can_compare_p (LE
, mode
, ccp_jump
))
9677 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
9679 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
9683 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9684 if (GET_MODE_CLASS (mode
) == MODE_INT
9685 && ! can_compare_p (GT
, mode
, ccp_jump
))
9686 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
9688 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
9692 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9693 if (GET_MODE_CLASS (mode
) == MODE_INT
9694 && ! can_compare_p (GE
, mode
, ccp_jump
))
9695 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
9697 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
9700 case UNORDERED_EXPR
:
9703 enum rtx_code cmp
, rcmp
;
9706 if (code
== UNORDERED_EXPR
)
9707 cmp
= UNORDERED
, rcmp
= ORDERED
;
9709 cmp
= ORDERED
, rcmp
= UNORDERED
;
9710 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9713 if (! can_compare_p (cmp
, mode
, ccp_jump
)
9714 && (can_compare_p (rcmp
, mode
, ccp_jump
)
9715 /* If the target doesn't provide either UNORDERED or ORDERED
9716 comparisons, canonicalize on UNORDERED for the library. */
9717 || rcmp
== UNORDERED
))
9721 do_compare_and_jump (exp
, cmp
, cmp
, if_false_label
, if_true_label
);
9723 do_compare_and_jump (exp
, rcmp
, rcmp
, if_true_label
, if_false_label
);
9728 enum rtx_code rcode1
;
9729 enum tree_code tcode2
;
9753 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
9754 if (can_compare_p (rcode1
, mode
, ccp_jump
))
9755 do_compare_and_jump (exp
, rcode1
, rcode1
, if_false_label
,
9759 tree op0
= save_expr (TREE_OPERAND (exp
, 0));
9760 tree op1
= save_expr (TREE_OPERAND (exp
, 1));
9763 /* If the target doesn't support combined unordered
9764 compares, decompose into UNORDERED + comparison. */
9765 cmp0
= fold (build (UNORDERED_EXPR
, TREE_TYPE (exp
), op0
, op1
));
9766 cmp1
= fold (build (tcode2
, TREE_TYPE (exp
), op0
, op1
));
9767 exp
= build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
), cmp0
, cmp1
);
9768 do_jump (exp
, if_false_label
, if_true_label
);
9775 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
9777 /* This is not needed any more and causes poor code since it causes
9778 comparisons and tests from non-SI objects to have different code
9780 /* Copy to register to avoid generating bad insns by cse
9781 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9782 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
9783 temp
= copy_to_reg (temp
);
9785 do_pending_stack_adjust ();
9786 /* Do any postincrements in the expression that was tested. */
9789 if (GET_CODE (temp
) == CONST_INT
9790 || (GET_CODE (temp
) == CONST_DOUBLE
&& GET_MODE (temp
) == VOIDmode
)
9791 || GET_CODE (temp
) == LABEL_REF
)
9793 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
9797 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
9798 && ! can_compare_p (NE
, GET_MODE (temp
), ccp_jump
))
9799 /* Note swapping the labels gives us not-equal. */
9800 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
9801 else if (GET_MODE (temp
) != VOIDmode
)
9802 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
9803 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
9804 GET_MODE (temp
), NULL_RTX
, 0,
9805 if_false_label
, if_true_label
);
9810 if (drop_through_label
)
9812 /* If do_jump produces code that might be jumped around,
9813 do any stack adjusts from that code, before the place
9814 where control merges in. */
9815 do_pending_stack_adjust ();
9816 emit_label (drop_through_label
);
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                              if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX, 0,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0, NULL_RTX, if_false_label);
    }

  emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
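
/* An illustrative sketch (editor's addition, not GCC code): the
   word-at-a-time scheme above, written out in C for an unsigned value
   stored most significant word first.  A greater-than on the current word
   decides the result immediately; only equal words let control fall
   through to the next lower word.  */

static int
example_gtu_by_parts (const unsigned long *op0, const unsigned long *op1,
                      int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    {
      if (op0[i] > op1[i])      /* corresponds to the GT branch above */
        return 1;
      if (op0[i] != op1[i])     /* corresponds to the NE branch above */
        return 0;
    }
  return 0;                     /* all words equal: not greater */
}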
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, 0, if_false_label,
                             NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
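
/* An illustrative sketch (editor's addition, not GCC code): the loop above
   examines one word per iteration and branches to the false label at the
   first mismatch; only if every word matches does control reach the final
   jump to the true label.  */

static int
example_eq_by_parts (const unsigned long *op0, const unsigned long *op1,
                     int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    if (op0[i] != op1[i])
      return 0;                 /* first differing word decides */
  return 1;                     /* no word differed */
}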
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, 0, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
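
/* An illustrative sketch (editor's addition, not GCC code): the preferred
   strategy above IORs all the words into one register and tests that single
   result against zero, so the branch count does not grow with the width of
   the value.  */

static int
example_is_zero_by_ior (const unsigned long *words, int nwords)
{
  unsigned long acc = words[0];
  int i;

  for (i = 1; i < nwords; i++)
    acc |= words[i];            /* one IOR per additional word */
  return acc == 0;              /* single comparison at the end */
}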
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0; op0 = op1; op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
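  /* An illustrative sketch (editor's addition, not GCC code): masking the
     constant as above is what makes the unsigned form of a signed equality
     test correct.  For a hypothetical 8-bit operand compared against -1,
     the constant is masked to (-1 & 0xff) == 0xff, so

         (signed char) x == -1     and     (unsigned char) x == 0xff

     test exactly the same bit pattern; equality is insensitive to whether
     the operand was sign- or zero-extended, provided the constant is
     adjusted in the same way.  */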

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
                         if_false_label, if_true_label)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0; op0 = op1; op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           MIN (align0, align1),
                           if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
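
  /* An illustrative note (editor's addition, not GCC code): the conversions
     in the switch above rest on simple integer identities, and comparing
     with zero is exactly what the single-bit and special-case tests later
     in this function look for:

         x <  1    is equivalent to    x <= 0    (signed or unsigned)
         x >= 1    is equivalent to    x >  0    (signed or unsigned)
         x <= -1   is equivalent to    x <  0    (signed only)
         x >  -1   is equivalent to    x >= 0    (signed only)  */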
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
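
  /* An illustrative sketch (editor's addition, not GCC code): for a
     hypothetical source test "(x & 0x8) != 0" the sequence built above
     computes the equivalent of

         t = (x >> 3) & 1;

     and for the EQ form "(x & 0x8) == 0" the XOR with 1 flips it:

         t = ((x >> 3) ^ 1) & 1;

     When the tested bit is the sign bit, the shift is done unsigned and
     the final AND is omitted, since the shifted value is already 0 or 1.  */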
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
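
/* An illustrative sketch (editor's addition, not GCC code): the
   set/compare/jump/set fallback above materialises a store-flag result on
   a machine with no scc instruction by loading one value and branching over
   the load of the other, roughly as in the following C for a plain "less
   than" (the inverted case simply swaps the two constants).  */

static int
example_store_flag_lt (int op0, int op1)
{
  int target = 1;               /* assume the condition will hold */

  if (op0 < op1)
    goto done;                  /* condition true: keep the 1 */
  target = 0;                   /* condition false: overwrite with 0 */
 done:
  return target;
}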
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);
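
  /* An illustrative sketch (editor's addition, not GCC code): with the low
     bound already subtracted, the single unsigned comparison above performs
     both range checks at once.  For a hypothetical switch over [low, high]
     with range = high - low:

         if ((unsigned) (index - low) > range)
           goto default_label;

     An index below LOW wraps around to a very large unsigned value, so it
     fails this test just as an index above HIGH does.  */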
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */
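
/* An illustrative sketch (editor's addition, not GCC code): the address
   arithmetic built by do_tablejump amounts to indexing an array of code
   addresses and jumping through the chosen entry.  A rough C analogue,
   using the GNU computed-goto extension and a hypothetical three-case
   table.  */

static int
example_tablejump (unsigned int index)
{
  static void *const table[] = { &&case0, &&case1, &&case2 };

  if (index > 2)                /* range check, as with RANGE above */
    goto use_default;
  /* entry address = table base + index * sizeof (entry)  */
  goto *table[index];

 case0:
  return 10;
 case1:
  return 11;
 case2:
  return 12;
 use_default:
  return -1;
}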