/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
30 #include "hard-reg-set.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
40 #include "typeclass.h"
44 #define CEIL(x,y) (((x) + (y) - 1) / (y))
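
/* For example, CEIL (10, 4) is 3: ten bytes occupy three 4-byte pieces.  */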
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
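
/* For example, with STACK_BOUNDARY == 32 and BITS_PER_UNIT == 8,
   STACK_BYTES is 4.  */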
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Nonzero if the machine description has been fixed to accept
   CONSTANT_P_RTX patterns.  We will emit a warning and continue
   if we find we must actually use such a beast.  */
static int can_handle_constant_p;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when flag_check_memory_usage is true, to avoid infinite
   recursion.  */
static int in_check_memory_usage;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn		PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue		PROTO((void));
static void move_by_pieces	PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx get_memory_rtx	PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
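
/* For example, under -Os a constant-size copy is expanded inline only
   when it needs fewer than 3 scalar move insns; anything larger becomes
   a block move.  */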
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  /* Find out if CONSTANT_P_RTX is accepted.  */
  SET_DEST (pat) = gen_rtx_REG (TYPE_MODE (integer_type_node),
				FIRST_PSEUDO_REGISTER);
  SET_SRC (pat) = gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
					  SET_DEST (pat));
  if (recog (pat, insn, &num_clobbers) >= 0)
    can_handle_constant_p = 1;

  end_sequence ();
  obfree (free_point);
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;
/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}
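
#if 0
/* Illustrative sketch only (hypothetical operands, not part of the
   original source): queue VAR = VAR + 1 for later emission.  VALUE can
   then stand for VAR's pre-increment value until the queue is flushed.  */
rtx value = enqueue_insn (var, gen_move_insn (var, plus_constant (var, 1)));
#endif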
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
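
#if 0
/* Illustrative sketch only (hypothetical operands): the calling
   discipline described above.  Protect each rtx immediately before
   putting it into an insn, then flush the queue.  */
src = protect_from_queue (src, 0);
dest = protect_from_queue (dest, 1);
emit_insn (gen_move_insn (dest, src));
emit_queue ();
#endif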
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode,
					     copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
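
#if 0
/* Illustrative sketch only (hypothetical operands): widen a QImode
   value to SImode with zero-extension, then narrow an SImode value
   to HImode.  */
rtx wide = convert_to_mode (SImode, byte_val, 1);
rtx narrow = convert_to_mode (HImode, word_val, 0);
#endif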
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
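
/* For example, on a 32-bit target with MOVE_MAX == 4 and ALIGN == 4,
   moving 10 bytes costs two SImode moves plus one HImode move, so
   move_by_pieces_ninsns (10, 4) returns 3.  */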
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
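
#if 0
/* Illustrative sketch only (hypothetical operands): copy a 16-byte,
   word-aligned block.  A constant size this small is normally handled
   by move_by_pieces rather than a movstr pattern or a library call.  */
emit_block_move (dest_mem, src_mem, GEN_INT (16), 4);
#endif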
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat, last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat, last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC in bytes.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int align, ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));

  /* If we won't be loading directly from memory, protect the real source
     from strange tricks we might play.  */
  src = orig_src;
  if (GET_CODE (src) != MEM)
    {
      src = gen_reg_rtx (GET_MODE (orig_src));
      emit_move_insn (src, orig_src);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
	  && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i],
			  change_address (src, mode,
					  plus_constant (XEXP (src, 0),
							 bytepos)));
	}
      else
	{
	  tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
				       bytepos*BITS_PER_UNIT, 1, NULL_RTX,
				       mode, mode, align, ssize);
	}

      if (BYTES_BIG_ENDIAN && shift)
	{
	  expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
			tmps[i], 0, OPTAB_WIDEN);
	}
    }
  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
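
/* For example, a DST of
     (parallel [(expr_list (reg:SI 4) (const_int 0))
		(expr_list (reg:SI 5) (const_int 4))])
   loads bytes 0-3 of ORIG_SRC into register 4 and bytes 4-7 into
   register 5; the register numbers here are hypothetical.  */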
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize, align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }
  else if (! MEM_IN_STRUCT_P (dst))
    {
      /* store_bit_field requires that memory operations have
	 mem_in_struct_p set; we might not.  */

      dst = copy_rtx (orig_dst);
      MEM_IN_STRUCT_P (dst) = 1;
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
	  && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  emit_move_insn (change_address (dst, mode,
					  plus_constant (XEXP (dst, 0),
							 bytepos)),
			  tmps[i]);
	}
      else
	{
	  store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
			   mode, tmps[i], align, ssize);
	}
    }
  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.

   If we call a function that returns the length of the block, return it.  */

rtx
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= (GET_MODE_MASK (mode) >> 1)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && (insn_operand_predicate[(int) code][0] == 0
		      || (*insn_operand_predicate[(int) code][0]) (object,
								   BLKmode))
		  && (insn_operand_predicate[(int) code][2] == 0
		      || (*insn_operand_predicate[(int) code][2]) (opalign,
								   VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  if (insn_operand_predicate[(int) code][1] != 0
		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
								     mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return 0;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

#ifdef TARGET_MEM_FUNCTIONS
	  retval
	    = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
				       ptr_mode, 3,
				       XEXP (object, 0), Pmode,
				       const0_rtx,
				       TYPE_MODE (integer_type_node),
				       convert_to_mode
				       (TYPE_MODE (sizetype), size,
					TREE_UNSIGNED (sizetype)),
				       TYPE_MODE (sizetype));
#else
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2,
			     XEXP (object, 0), Pmode,
			     convert_to_mode
			     (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));

  return retval;
}
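
/* Illustrative sketch, not part of the original file: a hypothetical caller
   zeroing a 64-byte BLKmode stack temporary.  With a small constant size,
   the move_by_pieces_ninsns test above normally selects the clear_by_pieces
   path rather than a memset/bzero library call.  Kept under #if 0 because
   it is purely illustrative.  */
#if 0
{
  rtx tmp = assign_stack_temp (BLKmode, 64, 0);
  clear_storage (tmp, GEN_INT (64), BIGGEST_ALIGNMENT / BITS_PER_UNIT);
}
#endif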

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
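
/* Illustrative sketch, not part of the original file: moving a constant
   into a fresh pseudo.  emit_move_insn legitimizes the constant and any
   memory addresses, then hands the validated operands to emit_move_insn_1.  */
#if 0
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
}
#endif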

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  /* Show the output dies here.  */
	  if (x != y)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      /* Show the output dies here.  */
      if (x != y)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      return last_insn;
    }
  else
    abort ();
}
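
/* Illustrative example, not part of the original source: with no movdc
   pattern, a DCmode (complex double) register copy falls into the complex
   branch above and is emitted as two DFmode moves, one for the realpart
   and one for the imagpart, preceded by a CLOBBER of the destination so
   flow sees the whole complex value die at once.  */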

/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			 negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			 negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
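
/* Illustrative note, not part of the original source: on a target where
   STACK_PUSH_CODE is PRE_DEC, gen_push_operand yields

       (pre_dec (reg sp))

   so gen_rtx_MEM (mode, gen_push_operand ()) denotes the stack slot that a
   push insn creates as it stores.  */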

/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
   block of SIZE bytes.  */

static rtx
get_push_address (size)
     int size;
{
  register rtx temp;

  if (STACK_PUSH_CODE == POST_DEC)
    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else if (STACK_PUSH_CODE == POST_INC)
    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else
    temp = stack_pointer_rtx;

  return copy_to_reg (temp);
}
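
/* Worked example, not part of the original source: with POST_DEC pushes the
   stack pointer moves only after each store, so the block just pushed
   begins at sp + SIZE, which is the PLUS case above; with the common
   PRE_DEC convention sp already addresses the block and is returned
   unchanged (after being copied to a register).  */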

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far, reg_parm_stack_space)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);

	  if (flag_check_memory_usage && ! in_check_memory_usage)
	    {
	      rtx temp;

	      in_check_memory_usage = 1;
	      temp = get_push_address (INTVAL (size) - used);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
				   temp, ptr_mode,
				   XEXP (xinner, 0), ptr_mode,
				   GEN_INT (INTVAL (size) - used),
				   TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				   temp, ptr_mode,
				   GEN_INT (INTVAL (size) - used),
				   TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));
	  if (flag_check_memory_usage && ! in_check_memory_usage)
	    {
	      rtx target;

	      in_check_memory_usage = 1;
	      target = copy_to_reg (temp);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
				   target, ptr_mode,
				   XEXP (xinner, 0), ptr_mode,
				   size, TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				   target, ptr_mode,
				   size, TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  else
	    {
	      rtx opalign = GEN_INT (align);
	      enum machine_mode mode;
	      rtx target = gen_rtx_MEM (BLKmode, temp);

	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		{
		  enum insn_code code = movstr_optab[(int) mode];

		  if (code != CODE_FOR_nothing
		      && ((GET_CODE (size) == CONST_INT
			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
			       <= (GET_MODE_MASK (mode) >> 1)))
			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		      && (insn_operand_predicate[(int) code][0] == 0
			  || ((*insn_operand_predicate[(int) code][0])
			      (target, BLKmode)))
		      && (insn_operand_predicate[(int) code][1] == 0
			  || ((*insn_operand_predicate[(int) code][1])
			      (xinner, BLKmode)))
		      && (insn_operand_predicate[(int) code][3] == 0
			  || ((*insn_operand_predicate[(int) code][3])
			      (opalign, VOIDmode))))
		    {
		      rtx op2 = convert_to_mode (mode, size, 1);
		      rtx last = get_last_insn ();
		      rtx pat;

		      if (insn_operand_predicate[(int) code][2] != 0
			  && ! ((*insn_operand_predicate[(int) code][2])
				(op2, mode)))
			op2 = copy_to_mode_reg (mode, op2);

		      pat = GEN_FCN ((int) code) (target, xinner,
						  op2, opalign);
		      if (pat)
			{
			  emit_insn (pat);
			  goto ret;
			}
		      else
			delete_insns_since (last);
		    }
		}
	    }

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space);
    }
  else
    {
      rtx addr;
      rtx target = NULL_RTX;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  target = addr;
	}

      emit_move_insn (gen_rtx_MEM (mode, addr), x);

      if (flag_check_memory_usage && ! in_check_memory_usage)
	{
	  in_check_memory_usage = 1;
	  if (target == 0)
	    target = get_push_address (GET_MODE_SIZE (mode));

	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			       target, ptr_mode,
			       XEXP (x, 0), ptr_mode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype));
	  else
	    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
			       target, ptr_mode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_RW),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}
    }
 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, -1, align);  /* ??? size? */
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
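
/* Worked example, not part of the original source: for a three-word
   argument with PARTIAL == 1, the first word travels in REG and only the
   remaining two words are pushed; the move_block_to_reg call above loads
   the register part last, after any block copy that might itself make
   library calls and clobber call-used registers.  */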

/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */
rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF)
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, &alignment);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();

	  if (GET_MODE (offset_rtx) != ptr_mode)
	    {
#ifdef POINTERS_EXTEND_UNSIGNED
	      offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	    }

	  if (GET_CODE (to_rtx) == MEM
	      && GET_MODE (to_rtx) == BLKmode
	      && bitsize
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
	    {
	      rtx temp = change_address (to_rtx, mode1,
					 plus_constant (XEXP (to_rtx, 0),
							(bitpos /
							 BITS_PER_UNIT)));
	      if (GET_CODE (XEXP (temp, 0)) == REG)
		to_rtx = temp;
	      else
		to_rtx = change_address (to_rtx, mode1,
					 force_reg (GET_MODE (XEXP (temp, 0)),
						    XEXP (temp, 0)));
	      bitpos = 0;
	    }

	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
						 force_reg (ptr_mode, offset_rtx)));
	}
      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    {
	      /* When the offset is zero, to_rtx is the address of the
		 structure we are storing into, and hence may be shared.
		 We must make a new MEM before setting the volatile bit.  */
	      if (offset == 0)
		to_rtx = copy_rtx (to_rtx);

	      MEM_VOLATILE_P (to_rtx) = 1;
	    }
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && TREE_READONLY (TREE_OPERAND (to, 1)))
	{
	  if (offset == 0)
	    to_rtx = copy_rtx (to_rtx);

	  RTX_UNCHANGING_P (to_rtx) = 1;
	}

      /* Check the access.  */
      if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
	{
	  rtx to_addr;
	  int size;
	  int best_mode_size;
	  enum machine_mode best_mode;

	  best_mode = get_best_mode (bitsize, bitpos,
				     TYPE_ALIGN (TREE_TYPE (tem)),
				     VOIDmode, 0);
	  if (best_mode == VOIDmode)
	    best_mode = QImode;

	  best_mode_size = GET_MODE_BITSIZE (best_mode);
	  to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
	  size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
	  size *= GET_MODE_SIZE (best_mode);

	  /* Check the access right of the pointer.  */
	  emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			     to_addr, ptr_mode,
			     GEN_INT (size), TYPE_MODE (sizetype),
			     GEN_INT (MEMORY_USE_WO),
			     TYPE_MODE (integer_type_node));
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    alignment,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
	 Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					  TYPE_MODE (TREE_TYPE (from)),
					  result,
					  TREE_UNSIGNED (TREE_TYPE (to)))
	      : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
     a promoted variable where the zero- or sign- extension needs to be done.
     Handling this in the normal way is safe because no computation is done
     before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from),
			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
      else
	emit_move_insn (to_rtx, value);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    {
      to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
      if (GET_CODE (to_rtx) == MEM)
	MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
    }

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
			      EXPAND_MEMORY_USE_DONT);

      /* Copy the rights of the bitmap.  */
      if (flag_check_memory_usage)
	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			   XEXP (to_rtx, 0), ptr_mode,
			   XEXP (from_rtx, 0), ptr_mode,
			   convert_to_mode (TYPE_MODE (sizetype),
					    size, TREE_UNSIGNED (sizetype)),
			   TYPE_MODE (sizetype));

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node),
					  size, TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
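
/* Illustrative note, not part of the original source: for an assignment
   `s.f = x' where `f' is a bit-field, the COMPONENT_REF case above uses
   get_inner_reference to recover (bitsize, bitpos, mode1) and delegates
   the masked store to store_field; a simple `v = x' falls through to the
   store_expr call at the end of the function.  */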

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */
rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value ? target : NULL_RTX;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
			  GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
	dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  But don't do this if the type of EXP is a subtype
	 of something else since then the conversion might involve
	 more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp
	      = convert
		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
					  TREE_TYPE (exp)),
		 exp);

	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
					SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
	}

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
	 the access now so it gets done only once.  Likewise if
	 it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
	  && (MEM_VOLATILE_P (temp)
	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
			      TYPE_MODE (TREE_TYPE (exp)), temp,
			      SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));
      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  if (flag_check_memory_usage
      && GET_CODE (target) == MEM
      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    {
      if (GET_CODE (temp) == MEM)
	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			   XEXP (target, 0), ptr_mode,
			   XEXP (temp, 0), ptr_mode,
			   expr_size (exp), TYPE_MODE (sizetype));
      else
	emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			   XEXP (target, 0), ptr_mode,
			   expr_size (exp), TYPE_MODE (sizetype),
			   GEN_INT (MEMORY_USE_WO),
			   TYPE_MODE (integer_type_node));
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */

  if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      convert (sizetype,
				       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */

	      addr = XEXP (target, 0);
	      addr = convert_modes (ptr_mode, Pmode, addr, 1);

	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
		}
	      else
		{
		  addr = force_reg (ptr_mode, addr);
		  addr = expand_binop (ptr_mode, add_optab, addr,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (ptr_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
				 GET_MODE (size), 0, 0);
		  label = gen_label_rtx ();
		  emit_jump_insn (gen_blt (label));
		}

	      if (size != const0_rtx)
		{
		  /* Be sure we can write on ADDR.  */
		  if (flag_check_memory_usage)
		    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				       addr, ptr_mode,
				       size, TYPE_MODE (sizetype),
				       GEN_INT (MEMORY_USE_WO),
				       TYPE_MODE (integer_type_node));
#ifdef TARGET_MEM_FUNCTIONS
		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
				     addr, ptr_mode,
				     const0_rtx, TYPE_MODE (integer_type_node),
				     convert_to_mode (TYPE_MODE (sizetype),
						      size,
						      TREE_UNSIGNED (sizetype)),
				     TYPE_MODE (sizetype));
#else
		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
				     addr, ptr_mode,
				     convert_to_mode (TYPE_MODE (integer_type_node),
						      size,
						      TREE_UNSIGNED (integer_type_node)),
				     TYPE_MODE (integer_type_node));
#endif
		}

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
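
/* Worked example, not part of the original source: for
   `char buf[8] = "hi";' the STRING_CST branch copies the string's three
   bytes (two characters plus the terminating null) with emit_block_move
   and then clears the remaining five bytes through the memset/bzero call,
   so the whole array ends up defined.  */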

/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}

/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }
  return is_zeros_p (exp);
}
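
/* Worked example, not part of the original source: a 16-element
   constructor with 12 zero elements gives 4*12 = 48 >= 3*16 = 48, so it
   counts as "mostly zeros"; callers then clear the whole object once and
   store only the four nonzero elements.  */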

/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos,
			 mode, exp, type, cleared)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (bitpos != 0)
	target = change_address (target, VOIDmode,
				 plus_constant (XEXP (target, 0),
						bitpos / BITS_PER_UNIT));
      store_constructor (exp, target, cleared);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp,
		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
		 int_size_in_bytes (type));
}
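
/* Illustrative note, not part of the original source: for a nested
   initializer such as `struct { struct in i; } v = { { 0, 1 } };' the
   inner CONSTRUCTOR arrives here byte-aligned, so the shortcut recurses
   into store_constructor with CLEARED still set and can skip re-clearing
   the inner struct when the outer object was already zeroed.  */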

/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.
   CLEARED is true if TARGET is known to have been zero'd.  */

static void
store_constructor (exp, target, cleared)
     tree exp;
     rtx target;
     int cleared;
{
  tree type = TREE_TYPE (exp);

  /* We know our target cannot conflict, since safe_from_p has been called.  */
#if 0
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp, 0);
      emit_move_insn (target, temp);
      return;
    }
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if (TREE_CODE (type) == UNION_TYPE
	  || TREE_CODE (type) == QUAL_UNION_TYPE)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	  cleared = 1;
	}

      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  */
      else if ((list_length (CONSTRUCTOR_ELTS (exp))
		!= list_length (TYPE_FIELDS (type)))
	       || mostly_zeros_p (exp))
	{
	  clear_storage (target, expr_size (exp),
			 TYPE_ALIGN (type) / BITS_PER_UNIT);
	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos = 0;
	  int unsignedp;
	  tree pos, constant = 0, offset = 0;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (TREE_VALUE (elt)))
	    continue;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	  unsignedp = TREE_UNSIGNED (field);
	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  pos = DECL_FIELD_BITPOS (field);
	  if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos;
	  else if (TREE_CODE (pos) == PLUS_EXPR
		   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
	  else
	    offset = pos;

	  if (constant)
	    bitpos = TREE_INT_CST_LOW (constant);

	  if (offset)
	    {
	      rtx offset_rtx;

	      if (contains_placeholder_p (offset))
		offset = build (WITH_RECORD_EXPR, sizetype,
				offset, make_tree (TREE_TYPE (exp), target));

	      offset = size_binop (FLOOR_DIV_EXPR, offset,
				   size_int (BITS_PER_UNIT));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();

	      if (GET_MODE (offset_rtx) != ptr_mode)
		{
#ifdef POINTERS_EXTEND_UNSIGNED
		  offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
		}

	      to_rtx
		= change_address (to_rtx, VOIDmode,
				  gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
						force_reg (ptr_mode, offset_rtx)));
	    }
	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

	  store_constructor_field (to_rtx, bitsize, bitpos,
				   mode, TREE_VALUE (elt), type, cleared);
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      int need_to_clear = 0;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;

	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp);
	       elt != NULL_TREE;
	       elt = TREE_CHAIN (elt))
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;
	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);
		  if (TREE_CODE (lo_index) != INTEGER_CST
		      || TREE_CODE (hi_index) != INTEGER_CST)
		    {
		      need_to_clear = 1;
		      break;
		    }
		  this_node_count = TREE_INT_CST_LOW (hi_index)
		    - TREE_INT_CST_LOW (lo_index) + 1;
		}
	      else
		this_node_count = 1;
	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }
	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (count < maxelt - minelt + 1
	      || 4 * zero_count >= 3 * count)
	    need_to_clear = 1;
	}
      if (need_to_clear)
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  if (cleared && is_zeros_p (value))
	    continue;

	  mode = TYPE_MODE (elttype);
	  bitsize = GET_MODE_BITSIZE (mode);
	  unsignedp = TREE_UNSIGNED (elttype);

	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	    {
	      tree lo_index = TREE_OPERAND (index, 0);
	      tree hi_index = TREE_OPERAND (index, 1);
	      rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
	      struct nesting *loop;
	      HOST_WIDE_INT lo, hi, count;
	      tree position;

	      /* If the range is constant and "small", unroll the loop.  */
	      if (TREE_CODE (lo_index) == INTEGER_CST
		  && TREE_CODE (hi_index) == INTEGER_CST
		  && (lo = TREE_INT_CST_LOW (lo_index),
		      hi = TREE_INT_CST_LOW (hi_index),
		      count = hi - lo + 1,
		      (GET_CODE (target) != MEM
		       || count <= 2
		       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
			   && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
			   <= 40 * 8))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
		      store_constructor_field (target, bitsize, bitpos,
					       mode, value, type, cleared);
		    }
		}
	      else
		{
		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  loop_top = gen_label_rtx ();
		  loop_end = gen_label_rtx ();

		  unsignedp = TREE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  DECL_RTL (index) = index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));

		  if (TREE_CODE (value) == SAVE_EXPR
		      && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the
			 loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
		  store_expr (lo_index, index_r, 0);
		  loop = expand_start_loop (0);

		  /* Assign value to element index.  */
		  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
					 size_int (BITS_PER_UNIT));
		  position = size_binop (MULT_EXPR,
					 size_binop (MINUS_EXPR, index,
						     TYPE_MIN_VALUE (domain)),
					 position);
		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
		  xtarget = change_address (target, mode, addr);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, cleared);
		  else
		    store_expr (value, xtarget, 0);

		  expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0, 0);
		  expand_end_loop ();
		  emit_label (loop_end);

		  /* Needed by stupid register allocation, to extend the
		     lifetime of pseudo-regs used by target past the end
		     of the loop.  */
		  emit_insn (gen_rtx_USE (GET_MODE (target), target));
		}
	    }
	  else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
		   || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
	    {
	      rtx pos_rtx, addr;
	      tree position;

	      if (index == 0)
		index = size_int (i);

	      if (minelt)
		index = size_binop (MINUS_EXPR, index,
				    TYPE_MIN_VALUE (domain));
	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
				     size_int (BITS_PER_UNIT));
	      position = size_binop (MULT_EXPR, index, position);
	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
	      addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
	      xtarget = change_address (target, mode, addr);
	      store_expr (value, xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
	      else
		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
	      store_constructor_field (target, bitsize, bitpos,
				       mode, value, type, cleared);
	    }
	}
    }
  /* set constructor assignments */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      int nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the whole set (using bzero/memset) and then
	 set the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE)
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_binop (MINUS_EXPR, domain_max, domain_min),
			      size_one_node);

      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
	abort ();
      nbits = TREE_INT_CST_LOW (bitlength);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
	 are "complicated" (more than one range), initialize (the
	 constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
	  int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = (char *) alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  int bit_pos = 0;
	  int ibit = 0;
	  int offset = 0;  /* In bytes from beginning of set.  */
	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}
	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;

		      /* The assumption here is that it is safe to use
			 XEXP if the set is multi-word, but not if
			 it's single-word.  */
		      if (GET_CODE (target) == MEM)
			{
			  to_rtx = plus_constant (XEXP (target, 0), offset);
			  to_rtx = change_address (target, mode, to_rtx);
			}
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }
		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (! cleared)
	{
	  /* Don't bother clearing storage if the set is all ones.  */
	  if (TREE_CHAIN (elt) != NULL_TREE
	      || (TREE_PURPOSE (elt) == NULL_TREE
		  ? nbits != 1
		  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
		     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
		     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
			 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
			 != nbits))))
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	}

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* start of range of element or NULL */
	  tree startbit = TREE_PURPOSE (elt);
	  /* end of range of element, or element value */
	  tree endbit = TREE_VALUE (elt);
#ifdef TARGET_MEM_FUNCTIONS
	  HOST_WIDE_INT startb, endb;
#endif
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* handle non-range tuple element like [ expr ]  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }
	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (REG_P (target))
	    {
	      targetx = assign_stack_temp (GET_MODE (target),
					   GET_MODE_SIZE (GET_MODE (target)),
					   0);
	      emit_move_insn (targetx, target);
	    }
	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

#ifdef TARGET_MEM_FUNCTIONS
	  /* Optimization:  If startbit and endbit are
	     constants divisible by BITS_PER_UNIT,
	     call memset instead.  */
	  if (TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    emit_library_call (memset_libfunc, 0,
			       VOIDmode, 3,
			       plus_constant (XEXP (targetx, 0),
					      startb / BITS_PER_UNIT),
			       Pmode,
			       constm1_rtx, TYPE_MODE (integer_type_node),
			       GEN_INT ((endb - startb) / BITS_PER_UNIT),
			       TYPE_MODE (sizetype));
	  else
#endif
	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
			       0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
			       bitlength_rtx, TYPE_MODE (sizetype),
			       startbit_rtx, TYPE_MODE (sizetype),
			       endbit_rtx, TYPE_MODE (sizetype));

	  if (REG_P (target))
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}
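
/* Worked example, not part of the original source: for a set type with
   domain [0..15] and constant members {1, 3}, the word-building loop
   above packs bit_buffer into word = (1 << 1) | (1 << 3) = 0x0a (on a
   !BYTES_BIG_ENDIAN target) and stores the whole set with a single
   emit_move_insn.  */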
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.  */
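
/* Illustrative sketch (an assumption, not part of the original file):
   for a hypothetical little-endian target and the C declaration

       struct s { unsigned a : 3; unsigned b : 5; } x;

   an assignment to x.b would typically reach store_field with
   bitsize == 5, bitpos == 3 and mode == VOIDmode (a bit-field store),
   so the value is inserted with store_bit_field below rather than
   through an ordinary memory reference.  */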
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
				      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      MEM_IN_STRUCT_P (object) = 1;
      MEM_IN_STRUCT_P (blk_object) = 1;
      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
		   align, total_size);

      /* Even though we aren't returning target, we need to
	 give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode])
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (SLOW_UNALIGNED_ACCESS
	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = change_address (target, VOIDmode,
				   plus_constant (XEXP (target, 0),
						  bitpos / BITS_PER_UNIT));

	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   1);

	  return value_mode == VOIDmode ? const0_rtx : target;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.  */
	  /* If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      if (unsignedp)
		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }

	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, 0, align,
				    total_size);
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = copy_rtx (change_address (target, mode,
					 plus_constant (addr,
							(bitpos
							 / BITS_PER_UNIT))));
      MEM_IN_STRUCT_P (to_rtx) = 1;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.
   We set *PALIGNMENT to the alignment in bytes of the address that will be
   computed.  This is the alignment of the thing we return if *POFFSET
   is zero, but can be less strictly aligned if *POFFSET is nonzero.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
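
/* Illustrative sketch (an assumption, not part of the original file):
   for a hypothetical reference `a.b[i].c' where field `c' sits 8 bits
   into its record and `b' is an array of 4-byte elements starting 16
   bytes into `a', get_inner_reference returns the object `a' with
   *PBITPOS == 16 * BITS_PER_UNIT + 8 for the constant part, while
   *POFFSET receives a tree for `i * 4', since the array index is
   variable.  */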
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep, palignment)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
     int *palignment;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;
  int alignment = BIGGEST_ALIGNMENT;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));
	  tree constant = integer_zero_node, var = pos;

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  /* Assume here that the offset is a multiple of a unit.
	     If not, there should be an explicitly added constant.  */
	  if (TREE_CODE (pos) == PLUS_EXPR
	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos, var = integer_zero_node;

	  *pbitpos += TREE_INT_CST_LOW (constant);
	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (EXACT_DIV_EXPR, var,
					   size_int (BITS_PER_UNIT)));
	}

      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);
	  tree xindex;

	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
	    {
	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
			       index);
	      index_type = TREE_TYPE (index);
	    }

	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index, low_bound));

	  if (TREE_CODE (index) == INTEGER_CST)
	    {
	      index = convert (sbitsizetype, index);
	      index_type = TREE_TYPE (index);
	    }

	  xindex = fold (build (MULT_EXPR, sbitsizetype, index,
				convert (sbitsizetype,
					 TYPE_SIZE (TREE_TYPE (exp)))));

	  if (TREE_CODE (xindex) == INTEGER_CST
	      && TREE_INT_CST_HIGH (xindex) == 0)
	    *pbitpos += TREE_INT_CST_LOW (xindex);
	  else
	    {
	      /* Either the bit offset calculated above is not constant, or
		 it overflowed.  In either case, redo the multiplication
		 against the size in units.  This is especially important
		 in the non-constant case to avoid a division at runtime.  */
	      xindex = fold (build (MULT_EXPR, ssizetype, index,
				    convert (ssizetype,
					     TYPE_SIZE_UNIT (TREE_TYPE (exp)))));

	      if (contains_placeholder_p (xindex))
		xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);

	      offset = size_binop (PLUS_EXPR, offset, xindex);
	    }
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       != UNION_TYPE))
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      /* If the offset is non-constant already, then we can't assume any
	 alignment more than the alignment here.  */
      if (! integer_zerop (offset))
	alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

      exp = TREE_OPERAND (exp, 0);
    }

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
    alignment = MIN (alignment, DECL_ALIGN (exp));
  else if (TREE_TYPE (exp) != 0)
    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  *palignment = alignment / BITS_PER_UNIT;
  return exp;
}
/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;
    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;
    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;
    case EXPAND_MEMORY_USE_DONT:
      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
	 MEMORY_USE_DONT, because they are modifiers to a call of
	 expand_expr in the ADDR_EXPR case of expand_expr.  */
    case EXPAND_CONST_ADDRESS:
    case EXPAND_INITIALIZER:
      return MEMORY_USE_DONT;
    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
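
/* Illustrative sketch (an assumption, not part of the original file):
   given VALUE == (plus:SI (mult:SI (reg:SI 100) (const_int 4))
			   (const_int 8)),
   force_operand emits the multiply via expand_mult and the addition
   via expand_binop, returning a pseudo register holding the sum, so
   the caller can use the result anywhere a general operand is
   required.  */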
rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  /* Check for a PIC address load.  */
  if (flag_pic
      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */
static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_temp (part_type, 0, 1, 1);
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}
/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */
static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */
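
/* Illustrative sketch (an assumption, not part of the original file):
   when expanding a hypothetical assignment `x = x + f ()' where `x'
   lives in memory, the MEM for `x' is passed as X and the right-hand
   side as EXP; the CALL_EXPR case below assumes a call can clobber
   all of memory, so safe_from_p returns 0 and the caller evaluates
   the right-hand side into a temporary before storing into X.  */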
static int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static int save_expr_count;
  static int save_expr_size = 0;
  static tree *save_expr_rewritten;
  static tree save_expr_trees[256];

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode))
    return 1;

  if (top_p && save_expr_size == 0)
    {
      int rtn;

      save_expr_count = 0;
      save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
      save_expr_rewritten = &save_expr_trees[0];

      rtn = safe_from_p (x, exp, 1);

      for (i = 0; i < save_expr_count; ++i)
	{
	  if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
	    abort ();
	  TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
	}

      save_expr_size = 0;

      return rtn;
    }

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp), 0))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;		/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
		  || TREE_STATIC (exp));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }
	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

	  /* This SAVE_EXPR might appear many times in the top-level
	     safe_from_p() expression, and if it has a complex
	     subexpression, examining it multiple times could result
	     in a combinatorial explosion.  E.g. on an Alpha
	     running at least 200MHz, a Fortran test case compiled with
	     optimization took about 28 minutes to compile -- even though
	     it was only a few lines long, and the complicated line causing
	     so much time to be spent in the earlier version of safe_from_p()
	     had only 293 or so unique nodes.

	     So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
	     where it is so we can turn it back in the top-level safe_from_p()
	     when we return.  */

	  /* For now, don't bother re-sizing the array.  */
	  if (save_expr_count >= save_expr_size)
	    return 0;
	  save_expr_rewritten[save_expr_count++] = exp;
	  TREE_SET_CODE (exp, ERROR_MARK);

	  nops = tree_code_length[(int) SAVE_EXPR];
	  for (i = 0; i < nops; i++)
	    if (TREE_OPERAND (exp, i) != 0
		&& ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	      return 0;
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	case METHOD_CALL_EXPR:
	  /* This takes a rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
#ifdef MAX_INTEGER_COMPUTATION_MODE
void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code = TREE_CODE (exp);
  enum machine_mode mode;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }
}
#endif
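
/* Illustrative note (an assumption, not part of the original file):
   on a hypothetical target that defines MAX_INTEGER_COMPUTATION_MODE
   as SImode, expanding a DImode addition reaches the binary-op check
   above with mode == DImode > SImode and stops the compilation with
   "unsupported wide integer operation".  */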
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
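
/* Illustrative sketch (an assumption, not part of the original file):
   expanding a hypothetical `a[i]' address with modifier == EXPAND_SUM
   may return the bare form
       (plus:SI (mult:SI (reg:SI 101) (const_int 4)) (reg:SI 102))
   instead of emitting the multiply and add immediately, leaving the
   caller (typically an address computation) free to fold the whole
   sum into a single addressing mode.  */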
rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  rtx original_target = target;
  int ignore = (target == const0_rtx
		|| ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		     || code == CONVERT_EXPR || code == REFERENCE_EXPR
		     || code == COND_EXPR)
		    && TREE_CODE (type) == VOID_TYPE));
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1')
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2'
	       || TREE_CODE_CLASS (code) == '<')
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);

      target = 0;
    }
#ifdef MAX_INTEGER_COMPUTATION_MODE
  if (target)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  if (GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    fatal ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif

  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    /* Allocate in the memory associated with the function
	       that the label is in.  */
	    push_obstacks (p->function_obstack,
			   p->function_maybepermanent_obstack);

	    p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
						  label_rtx (exp),
						  p->forced_labels);
	    pop_obstacks ();
	  }
	else if (modifier == EXPAND_INITIALIZER)
	  forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
					     label_rtx (exp), forced_labels);
	temp = gen_rtx_MEM (FUNCTION_MODE,
			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  layout_decl (exp, 0);
	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
	  pop_obstacks ();
	}

      /* Only check automatic variables.  Currently, function arguments are
	 not checked (this can be done at compile-time with prototypes).
	 Aggregates are not checked.  */
      if (flag_check_memory_usage && code == VAR_DECL
	  && GET_CODE (DECL_RTL (exp)) == MEM
	  && DECL_CONTEXT (exp) != NULL_TREE
	  && ! TREE_STATIC (exp)
	  && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	{
	  enum memory_use_mode memory_usage;
	  memory_usage = get_memory_usage_from_modifier (modifier);

	  if (memory_usage != MEMORY_USE_DONT)
	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       XEXP (DECL_RTL (exp), 0), ptr_mode,
			       GEN_INT (int_size_in_bytes (type)),
			       TYPE_MODE (sizetype),
			       GEN_INT (memory_usage),
			       TYPE_MODE (integer_type_node));
	}

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr = gen_rtx_MEM (Pmode,
				fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);
	  temp = change_address (DECL_RTL (exp), mode, addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
			       XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = change_address (DECL_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0),
			      DECL_ALIGN (exp) / BITS_PER_UNIT);

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();

	  temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return DECL_RTL (exp);
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp),
				 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
			  EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return change_address (TREE_CST_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	char *saved_input_filename = input_filename;
	int saved_lineno = lineno;
	input_filename = EXPR_WFL_FILENAME (exp);
	lineno = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (type, 3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      {
	rtx temp;
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
	return temp;
      }

    case PLACEHOLDER_EXPR:
      {
	tree placeholder_expr;

	/* If there is an object on the head of the placeholder list,
	   see if some object in it is of type TYPE or a pointer to it.  For
	   further information, see tree.def.  */
	for (placeholder_expr = placeholder_list;
	     placeholder_expr != 0;
	     placeholder_expr = TREE_CHAIN (placeholder_expr))
	  {
	    tree need_type = TYPE_MAIN_VARIANT (type);
	    tree object = 0;
	    tree old_list = placeholder_list;
	    tree elt;

	    /* Find the outermost reference that is of the type we want.
	       If none, see if any object has a type that is a pointer to
	       the type we want.  */
	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
		object = elt;

	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (POINTER_TYPE_P (TREE_TYPE (elt))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		      == need_type))
		object = build1 (INDIRECT_REF, need_type, elt);

	    if (object != 0)
	      {
		/* Expand this object skipping the list entries before
		   it was found in case it is also a PLACEHOLDER_EXPR.
		   In that case, we want to translate it using subsequent
		   entries.  */
		placeholder_list = TREE_CHAIN (placeholder_expr);
		temp = expand_expr (object, original_target, tmode,
				    ro_modifier);
		placeholder_list = old_list;
		return temp;
	      }
	  }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
			    tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
,
5666 invert_truthvalue (TREE_OPERAND (exp
, 0)));
5671 expand_start_loop (1);
5672 expand_expr_stmt (TREE_OPERAND (exp
, 0));
5680 tree vars
= TREE_OPERAND (exp
, 0);
5681 int vars_need_expansion
= 0;
5683 /* Need to open a binding contour here because
5684 if there are any cleanups they must be contained here. */
5685 expand_start_bindings (0);
5687 /* Mark the corresponding BLOCK for output in its proper place. */
5688 if (TREE_OPERAND (exp
, 2) != 0
5689 && ! TREE_USED (TREE_OPERAND (exp
, 2)))
5690 insert_block (TREE_OPERAND (exp
, 2));
5692 /* If VARS have not yet been expanded, expand them now. */
5695 if (DECL_RTL (vars
) == 0)
5697 vars_need_expansion
= 1;
5700 expand_decl_init (vars
);
5701 vars
= TREE_CHAIN (vars
);
5704 temp
= expand_expr (TREE_OPERAND (exp
, 1), target
, tmode
, ro_modifier
);
5706 expand_end_bindings (TREE_OPERAND (exp
, 0), 0, 0);
5712 if (RTL_EXPR_SEQUENCE (exp
))
5714 if (RTL_EXPR_SEQUENCE (exp
) == const0_rtx
)
5716 emit_insns (RTL_EXPR_SEQUENCE (exp
));
5717 RTL_EXPR_SEQUENCE (exp
) = const0_rtx
;
5719 preserve_rtl_expr_result (RTL_EXPR_RTL (exp
));
5720 free_temps_for_rtl_expr (exp
);
5721 return RTL_EXPR_RTL (exp
);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
			&& (move_by_pieces_ninsns
			    (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
			     TYPE_ALIGN (type) / BITS_PER_UNIT)
			    > MOVE_RATIO)
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM
	      && (! memory_address_p (GET_MODE (constructor),
				      XEXP (constructor, 0))
		  || (flag_force_addr
		      && GET_CODE (XEXP (constructor, 0)) != REG)))
	    constructor = change_address (constructor, VOIDmode,
					  XEXP (constructor, 0));
	  return constructor;
	}

      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL)
	    {
	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (TREE_READONLY (exp))
	    {
	      if (GET_CODE (target) == MEM)
		target = copy_rtx (target);

	      RTX_UNCHANGING_P (target) = 1;
	    }

	  store_constructor (exp, target, 0);
	  return target;
	}
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;
	tree index;
	tree string = string_constant (exp1, &index);
	int i;

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_MEMORY_USE_WO)
	  return GEN_INT (TREE_STRING_POINTER (string)[i]);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		in_check_memory_usage = 1;
		emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				   op0, ptr_mode,
				   GEN_INT (int_size_in_bytes (type)),
				   TYPE_MODE (sizetype),
				   GEN_INT (memory_usage),
				   TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }

	temp = gen_rtx_MEM (mode, op0);
	/* If address was computed by addition,
	   mark this as an element of an aggregate.  */
	if (TREE_CODE (exp1) == PLUS_EXPR
	    || (TREE_CODE (exp1) == SAVE_EXPR
		&& TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
	    /* If the pointer is actually a REFERENCE_TYPE, this could
	       be pointing into some aggregate too.  */
	    || TREE_CODE (TREE_TYPE (exp1)) == REFERENCE_TYPE
	    || (TREE_CODE (exp1) == ADDR_EXPR
		&& (exp2 = TREE_OPERAND (exp1, 0))
		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2)))
	    /* This may have been an array reference to the first element
	       that was optimized away from being an addition.  */
	    || (TREE_CODE (exp1) == NOP_EXPR
		&& ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
		     == REFERENCE_TYPE)
		    || ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
			 == POINTER_TYPE)
			&& (AGGREGATE_TYPE_P
			    (TREE_TYPE (TREE_TYPE
					(TREE_OPERAND (exp1, 0)))))))))
	  MEM_IN_STRUCT_P (temp) = 1;

	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
	MEM_ALIAS_SET (temp) = get_alias_set (exp);

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
	return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = TREE_OPERAND (exp, 1);
	tree index_type = TREE_TYPE (index);
	HOST_WIDE_INT i;

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion,  (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	   But sizetype isn't quite right either (especially if
	   the lowbound is negative).  FIXME */

	if (! integer_zerop (low_bound))
	  index = fold (build (MINUS_EXPR, index_type, index,
			       convert (sizetype, low_bound)));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return GEN_INT (TREE_STRING_POINTER (array)[i]);
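
	/* Illustrative note (an assumption, not part of the original
	   file): for the C expression "foo"[2], the test above succeeds
	   with i == 2 and the whole reference folds at compile time to
	   (const_int 111), the character code of 'o'.  */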
	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_INT_CST_HIGH (index) == 0)
	      {
		tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

		i = TREE_INT_CST_LOW (index);
		while (elem && i--)
		  elem = TREE_CHAIN (elem);
		if (elem)
		  return expand_expr (fold (TREE_VALUE (elem)), target,
				      tmode, ro_modifier);
	      }
	  }

	else if (optimize >= 1
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		i = TREE_INT_CST_LOW (index);
		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem = CONSTRUCTOR_ELTS (init);

		    while (elem
			   && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
		      elem = TREE_CHAIN (elem);
		    if (elem)
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, ro_modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && TREE_INT_CST_HIGH (index) == 0
			 && (TREE_INT_CST_LOW (index)
			     < TREE_STRING_LENGTH (init)))
		  return (GEN_INT
			  (TREE_STRING_POINTER
			   (init)[TREE_INT_CST_LOW (index)]));
	      }
	  }
      }

      /* ... fall through ...  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (op0, op1, target);
		      }
		    else
		      {
			enum machine_mode imode
			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      {
	enum machine_mode mode1;
	int bitsize;
	int bitpos;
	tree offset;
	int volatilep = 0;
	int alignment;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep,
					&alignment);

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */

	op0 = expand_expr (tem,
			   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				!= INTEGER_CST)
			    ? target : NULL_RTX),
			   VOIDmode,
			   modifier == EXPAND_INITIALIZER
			   ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    if (GET_CODE (op0) != MEM)
	      abort ();

	    if (GET_MODE (offset_rtx) != ptr_mode)
	      {
#ifdef POINTERS_EXTEND_UNSIGNED
		offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	      }

	    if (GET_CODE (op0) == MEM
		&& GET_MODE (op0) == BLKmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
	      {
		rtx temp = change_address (op0, mode1,
					   plus_constant (XEXP (op0, 0),
							  (bitpos
							   / BITS_PER_UNIT)));
		if (GET_CODE (XEXP (temp, 0)) == REG)
		  op0 = temp;
		else
		  op0 = change_address (op0, mode1,
					force_reg (GET_MODE (XEXP (temp, 0)),
						   XEXP (temp, 0)));
		bitpos = 0;
	      }

	    op0 = change_address (op0, VOIDmode,
				  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
						force_reg (ptr_mode, offset_rtx)));
	  }

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* Check the access.  */
	if (flag_check_memory_usage && GET_CODE (op0) == MEM)
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		rtx to;
		int size;

		to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
		size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

		/* Check the access right of the pointer.  */
		if (size > BITS_PER_UNIT)
		  emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				     to, ptr_mode,
				     GEN_INT (size / BITS_PER_UNIT),
				     TYPE_MODE (sizetype),
				     GEN_INT (memory_usage),
				     TYPE_MODE (integer_type_node));
	      }
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.
	   If we ultimately want the address (EXPAND_CONST_ADDRESS or
	   EXPAND_INITIALIZER), then we must not copy to a temporary.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& ((mode1 != BLKmode && ! direct_load[(int) mode1]
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
		    /* If the field isn't aligned enough to fetch as a memref,
		       fetch it as a bit field.  */
		    || (SLOW_UNALIGNED_ACCESS
			&& ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
			    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode)
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = change_address (op0, VOIDmode,
				      plus_constant (XEXP (op0, 0),
						     bitpos / BITS_PER_UNIT));
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 1);

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), alignment);

	    op0 = extract_bit_field (op0, bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     alignment,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    if (mode == BLKmode)
	      {
		rtx new = assign_stack_temp (ext_mode,
					     bitsize / BITS_PER_UNIT, 0);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		MEM_IN_STRUCT_P (op0) = 1;
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
						   (bitpos / BITS_PER_UNIT)));
	else
	  op0 = change_address (op0, mode1,
				plus_constant (XEXP (op0, 0),
					       (bitpos / BITS_PER_UNIT)));

	if (GET_CODE (op0) == MEM)
	  MEM_ALIAS_SET (op0) = get_alias_set (exp);

	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), alignment);

	MEM_IN_STRUCT_P (op0) = 1;
	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */

      /* Pascal set IN expression.

	 Algorithm:
	     rlo       = set_low - (set_low % bits_per_word);
	     the_word  = set[(index - rlo) / bits_per_word];
	     bit_index = index % bits_per_word;
	     bitmask   = 1 << bit_index;
	     return !!(the_word & bitmask);  */
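      /* Editor's illustration (not part of the original source): a minimal,
	 self-contained C sketch of the test above, assuming <limits.h> is
	 included for CHAR_BIT, a byte-addressed bitmap (words BITS_PER_UNIT
	 wide, as the expansion below assumes), and a nonnegative, in-range
	 INDEX.  The helper name `set_contains' is hypothetical.

	   static int
	   set_contains (const unsigned char *data, long set_low, long index)
	   {
	     long rlo = set_low - (set_low % CHAR_BIT);
	     unsigned char the_word = data[(index - rlo) / CHAR_BIT];
	     int bit_index = index % CHAR_BIT;
	     unsigned char bitmask = (unsigned char) (1 << bit_index);
	     return (the_word & bitmask) != 0;
	   }

	 A Pascal `x in s' with a non-constant X expands to this
	 load/shift/mask sequence plus the two range checks emitted below.  */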
	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));

	rtx diff, quo, rem, addr, bit, result;

	preexpand_calls (exp);

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if (((TREE_CODE (set_high_bound) == INTEGER_CST
	      && TREE_CODE (set_low_bound) == INTEGER_CST
	      && tree_int_cst_lt (set_high_bound, set_low_bound))
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))

	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  {
	    emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
			   GET_MODE (index_val), iunsignedp, 0);
	    emit_jump_insn (gen_blt (op1));
	  }

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  {
	    emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
			   GET_MODE (index_val), iunsignedp, 0);
	    emit_jump_insn (gen_bgt (op1));
	  }

	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),

	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);

	/* Output the code to handle the out-of-range case.  */

	emit_move_insn (target, const0_rtx);

    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
	{
	  RTL_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 2) = 0;
	}
      return RTL_EXPR_RTL (exp);
    case CLEANUP_POINT_EXPR:
      {
	extern int temp_slot_level;
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (0);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	/* If we're going to use this value, load it up now.  */

	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }

      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))

	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	return expand_builtin (exp, target, subtarget, tmode, ignore);

      /* If this call was expanded already by preexpand_calls,
	 just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
	return CALL_EXPR_RTL (exp);

      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:

    case REFERENCE_EXPR:
      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  if (mode != BLKmode)
	    target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	  else
	    target = assign_temp (type, 0, 1, 1);

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			change_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
			 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),

			 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	return
	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */

      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
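      /* Editor's illustration (not from the original source): given

	   static int arr[10];
	   int *p = &arr[3];

	 the initializer's address is a PLUS of a SYMBOL_REF and a constant.
	 plus_constant folds it to the single RTL expression
	 (plus (symbol_ref "arr") (const_int 12)) (assuming a 4-byte int),
	 which an initializer can use directly even on machines where that
	 sum is not a legitimate memory address.  */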
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || mode == ptr_mode)
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,

	      op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);

	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,

	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)

		}
	      op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);

	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);

      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))

      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);

	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,

	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
				 VOIDmode, ro_modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				 VOIDmode, ro_modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	{
	  tree negated = fold (build1 (NEGATE_EXPR, type,
				       TREE_OPERAND (exp, 1)));

	  /* Deal with the case where we can't negate the constant
	     in TYPE.  */
	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
	    {
	      tree newtype = signed_type (type);
	      tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
	      tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
	      tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));

	      if (! TREE_OVERFLOW (newneg))
		return expand_expr (convert (type,
					     build (PLUS_EXPR, newtype,

				    target, tmode, ro_modifier);
	    }

	  exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);

	}
      this_optab = sub_optab;

      preexpand_calls (exp);
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  register tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,

	  /* Apply distributive law if OP0 is x+c.  */
	  if (GET_CODE (op0) == PLUS
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    return gen_rtx_PLUS (mode,
				 gen_rtx_MULT (mode, XEXP (op0, 0),
					       GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
				 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
					  * INTVAL (XEXP (op0, 1))));

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx_MULT (mode, op0,
			       GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
	}

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
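      /* Editor's illustration (not from the original source): on a machine
	 with a 16x16->32 multiply instruction (HImode operands, SImode
	 result), the check below lets

	   short a, b;
	   long  c = (long) a * (long) b;

	 expand to a single widening multiply on the SImode result, instead
	 of two sign-extensions followed by a full SImode multiply.  Both
	 operands must have been extended the same way (both signed or both
	 unsigned), or the second must be a constant that fits the narrow
	 type.  */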
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			       ? smul_widen_optab : umul_widen_optab);
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,

		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);

		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{

		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,

		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),

						      gen_highpart (innermode, temp),

		    emit_move_insn (gen_highpart (innermode, temp), htem);

		}
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:

    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))

      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

      this_optab = flodiv_optab;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:

    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:

      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);

	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);

	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),

      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));

      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	  emit_move_insn (target, op1);
	}
      else
	{
	  if (code == MAX_EXPR)
	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
		    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
		    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
	  else
	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
		    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
		    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
	  if (temp == const0_rtx)
	    emit_move_insn (target, op1);
	  else if (temp != const_true_rtx)
	    {
	      if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
		emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));

	      emit_move_insn (target, op1);
	    }
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */
      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */
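      /* Editor's illustration (not from the original source): for operands
	 already normalized to 0 or 1, e.g.

	   int p, q, r;
	   r = (p != 0) & (q != 0);     (a TRUTH_AND_EXPR)

	 both comparisons are evaluated and their results bitwise-ANDed,
	 with no branch.  By contrast, `(p != 0) && (q != 0)'
	 (TRUTH_ANDIF_EXPR) must skip the second test whenever the first
	 is false.  */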
    case TRUTH_AND_EXPR:

      this_optab = and_optab;

      this_optab = ior_optab;

    case TRUTH_XOR_EXPR:

      this_optab = xor_optab;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */

      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))

	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,

	  if (temp != original_target)
	    temp = copy_to_reg (temp);

	  op1 = gen_label_rtx ();
	  emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
			 GET_MODE (temp), unsignedp, 0);
	  emit_jump_insn (gen_beq (op1));
	  emit_move_insn (temp, const1_rtx);
	}

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:

	  && (target == 0 || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

	emit_0_to_1_insn (target);

      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);

      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);

      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),

      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
	       && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
		  && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (true),
					       TREE_OPERAND (exp, 0),

				target, tmode, modifier);
	}

      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      /* If an arm of the branch requires a cleanup,
	 only that cleanup is performed.  */

      {
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {

	      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
	    if (GET_MODE (op0) == mode)

	    target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */
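	/* Editor's illustration (not from the original source):

	     int y = c ? a + b : a;

	   can be compiled as

	     y = a;  if (c) y += b;

	   copying A unconditionally and adding B only on the taken path,
	   which avoids materializing both arms.  This is only safe because
	   C has no side effects here.  */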
	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && ! (GET_CODE (original_target) == MEM
		       && MEM_VOLATILE_P (original_target)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))

	  temp = assign_temp (type, 0, 0, 1);
	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
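	/* Editor's illustration (not from the original source): with C == 4,

	     y = x ? a + 4 : a;

	   becomes, branch-free,

	     y = a + ((x != 0) << 2);

	   since (x != 0) is materialized as 0 or 1 by a store-flag
	   instruction, and shifting it by log2(4) = 2 scales it to
	   0 or 4.  */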
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {

	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab

	    /* If we had X ? A : A + 1, do this as A + (X == 0).

	       We have to invert the truth value here and then put it
	       back later if do_store_flag fails.  We cannot simply copy
	       TREE_OPERAND (exp, 0) to another variable and modify that
	       because invert_truthvalue can modify the tree pointed to
	       by its argument.  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));

	    result = do_store_flag (TREE_OPERAND (exp, 0),
				    (safe_from_p (temp, singleton, 1)

				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2

				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);

	    else if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));
	  }

	do_pending_stack_adjust ();

	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {

	    /* If the target conflicts with the other operand of the
	       binary op, we can't use it.  Also, we can't use the target
	       if it is a hard register, because evaluating the condition
	       might clobber it.  */

		 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		|| (GET_CODE (temp) == REG
		    && REGNO (temp) < FIRST_PSEUDO_REGISTER))
	      temp = gen_reg_rtx (mode);
	    store_expr (singleton, temp, 0);

	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),

	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */

	     && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
	     && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
	     && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				 TREE_OPERAND (exp, 1), 0)
	     && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
	     && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	  }

	     && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
	     && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
	     && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				 TREE_OPERAND (exp, 2), 0)
	     && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
	     && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	  }

	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();

	      store_expr (TREE_OPERAND (exp, 1), temp, 0);

	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();

	    emit_jump_insn (gen_jump (op1));

	    start_cleanup_deferral ();

	      store_expr (TREE_OPERAND (exp, 2), temp, 0);

	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);

	    end_cleanup_deferral ();
      /* Something needs to be initialized, but we didn't know
	 where that thing was when building the tree.  For example,
	 it could be the return value of a function, or a parameter
	 to a function which lays down in the stack, or a temporary
	 variable which must be passed by reference.

	 We guarantee that the expression will either be constructed
	 or copied into our original target.  */
      {
	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;

	if (TREE_CODE (slot) != VAR_DECL)

	  target = original_target;

	if (DECL_RTL (slot) != 0)
	  {
	    target = DECL_RTL (slot);
	    /* If we have already expanded the slot, don't do it again.  */
	    if (TREE_OPERAND (exp, 1) == NULL_TREE)

	  }

	    target = assign_temp (type, 2, 0, 1);
	    /* All temp slots at this level must not conflict.  */
	    preserve_temp_slots (target);
	    DECL_RTL (slot) = target;
	    if (TREE_ADDRESSABLE (slot))
	      {
		TREE_ADDRESSABLE (slot) = 0;
		mark_addressable (slot);
	      }

	    /* Since SLOT is not known to the called function
	       to belong to its stack frame, we must build an explicit
	       cleanup.  This case occurs when we must build up a reference
	       to pass the reference as an argument.  In this case,
	       it is very likely that such a reference need not be
	       built here.  */

	    if (TREE_OPERAND (exp, 2) == 0)
	      TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
	    cleanups = TREE_OPERAND (exp, 2);

	/* This case does occur, when expanding a parameter which
	   needs to be constructed on the stack.  The target
	   is the actual stack address that we want to initialize.
	   The function we call will perform the cleanup in this case.  */

	/* If we have already assigned it space, use that space,
	   not target that we were passed in, as our target
	   parameter is only a hint.  */
	if (DECL_RTL (slot) != 0)
	  {
	    target = DECL_RTL (slot);
	    /* If we have already expanded the slot, don't do it again.  */
	    if (TREE_OPERAND (exp, 1) == NULL_TREE)

	  }

	    DECL_RTL (slot) = target;
	    /* If we must have an addressable slot, then make sure that
	       the RTL that we just stored in slot is OK.  */
	    if (TREE_ADDRESSABLE (slot))
	      {
		TREE_ADDRESSABLE (slot) = 0;
		mark_addressable (slot);
	      }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	TREE_USED (slot) = 1;
	store_expr (exp1, target, 0);

	expand_decl_cleanup (NULL_TREE, cleanups);

      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
	  noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_VALUE (noncopied_parts),
			       TREE_PURPOSE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }

      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a call.
	   If lhs is simple, compute it first so we can give it as a
	   target if the rhs is just a call.  This avoids an extra temp and copy
	   and that prevents a partial-subsumption which makes bad code.
	   Actually we could treat component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	if (TREE_CODE (lhs) != VAR_DECL
	    && TREE_CODE (lhs) != RESULT_DECL
	    && TREE_CODE (lhs) != PARM_DECL
	    && ! (TREE_CODE (lhs) == INDIRECT_REF
		  && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
	  preexpand_calls (exp);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   avoid these cases.  */
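	/* Editor's illustration (not from the original source): given

	     struct s { unsigned int a : 1, b : 1; } x;
	     x.a |= x.b;

	   when the result is not needed, the expansion below emits the
	   equivalent of

	     if (x.b) x.a = 1;

	   (and for `&=', `if (!x.b) x.a = 0;'), replacing a
	   read-modify-write of the destination bitfield with a test and a
	   conditional store.  */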
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR

					      : integer_zero_node)),

	    do_pending_stack_adjust ();

	  }

	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
	    && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
	  noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_PURPOSE (noncopied_parts),
			       TREE_VALUE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
:
7540 case PREDECREMENT_EXPR
:
7541 return expand_increment (exp
, 0, ignore
);
7543 case POSTINCREMENT_EXPR
:
7544 case POSTDECREMENT_EXPR
:
7545 /* Faster to treat as pre-increment if result is not used. */
7546 return expand_increment (exp
, ! ignore
, ignore
);
7549 /* If nonzero, TEMP will be set to the address of something that might
7550 be a MEM corresponding to a stack slot. */
7553 /* Are we taking the address of a nested function? */
7554 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == FUNCTION_DECL
7555 && decl_function_context (TREE_OPERAND (exp
, 0)) != 0
7556 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp
, 0))
7557 && ! TREE_STATIC (exp
))
7559 op0
= trampoline_address (TREE_OPERAND (exp
, 0));
7560 op0
= force_operand (op0
, target
);
7562 /* If we are taking the address of something erroneous, just
7564 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
7568 /* We make sure to pass const0_rtx down if we came in with
7569 ignore set, to avoid doing the cleanups twice for something. */
7570 op0
= expand_expr (TREE_OPERAND (exp
, 0),
7571 ignore
? const0_rtx
: NULL_RTX
, VOIDmode
,
7572 (modifier
== EXPAND_INITIALIZER
7573 ? modifier
: EXPAND_CONST_ADDRESS
));
7575 /* If we are going to ignore the result, OP0 will have been set
7576 to const0_rtx, so just return it. Don't get confused and
7577 think we are taking the address of the constant. */
7581 op0
= protect_from_queue (op0
, 0);
7583 /* We would like the object in memory. If it is a constant,
7584 we can have it be statically allocated into memory. For
7585 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7586 memory and store the value into it. */
7588 if (CONSTANT_P (op0
))
7589 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
7591 else if (GET_CODE (op0
) == MEM
)
7593 mark_temp_addr_taken (op0
);
7594 temp
= XEXP (op0
, 0);
7597 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
7598 || GET_CODE (op0
) == CONCAT
|| GET_CODE (op0
) == ADDRESSOF
)
7600 /* If this object is in a register, it must be not
7602 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
7603 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
7605 mark_temp_addr_taken (memloc
);
7606 emit_move_insn (memloc
, op0
);
7610 if (GET_CODE (op0
) != MEM
)
7613 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7615 temp
= XEXP (op0
, 0);
7616 #ifdef POINTERS_EXTEND_UNSIGNED
7617 if (GET_MODE (temp
) == Pmode
&& GET_MODE (temp
) != mode
7618 && mode
== ptr_mode
)
7619 temp
= convert_memory_address (ptr_mode
, temp
);
7624 op0
= force_operand (XEXP (op0
, 0), target
);
7627 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
7628 op0
= force_reg (Pmode
, op0
);
7630 if (GET_CODE (op0
) == REG
7631 && ! REG_USERVAR_P (op0
))
7632 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)) / BITS_PER_UNIT
);
7634 /* If we might have had a temp slot, add an equivalent address
7637 update_temp_slot_address (temp
, op0
);
7639 #ifdef POINTERS_EXTEND_UNSIGNED
7640 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
7641 && mode
== ptr_mode
)
7642 op0
= convert_memory_address (ptr_mode
, op0
);
    case ENTRY_VALUE_EXPR:

      /* COMPLEX type for Extended Pascal & Fortran.  */
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);

      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	  target = gen_reg_rtx (mode);

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode, neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);

	  emit_move_insn (imag_t, temp);

	insns = get_insns ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);

    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	expand_eh_region_end (handler);

      {
	rtx dcc = get_dynamic_cleanup_chain ();
	emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));

      {
	rtx dhc = get_dynamic_handler_chain ();
	emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));

	op0 = CONST0_RTX (tmode);

      return (*lang_expand_expr) (exp, original_target, tmode, modifier);

      /* Here to do an ordinary binary operator, generating an instruction
	 from the optab already placed in `this_optab'.  */

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

      temp = expand_binop (mode, this_optab, op0, op1, target,
			   unsignedp, OPTAB_LIB_WIDEN);
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */
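/* Editor's illustration (not from the original source): for

     static double d;
     char *p = (char *) &d;

   the static type of the pointer only guarantees char alignment
   (BITS_PER_UNIT), but the expression is an ADDR_EXPR of a decl, so the
   walk below recovers the declared alignment of `d' (e.g. 64 bits),
   capped at MAX_ALIGN.  */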
get_pointer_alignment (exp, max_align)

{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

      switch (TREE_CODE (exp))
	{

	case NON_LVALUE_EXPR:
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);

	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)

	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)

	  exp = TREE_OPERAND (exp, 0);

	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);
/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */

string_constant (arg, ptr_offset)

{

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = integer_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{

	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{

	  return TREE_OPERAND (arg1, 0);
	}
    }
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */
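/* Editor's illustration (not from the original source): for

     char s[] = "foo\0bar";

   TREE_STRING_LENGTH is 8 (the size of the array, counting both the
   embedded and the trailing NUL), while the C string length that callers
   such as strlen folding need is 3.  Hence the code below searches for
   the first zero byte instead of trusting TREE_STRING_LENGTH.  */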
  src = string_constant (src, &offset_node);

  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */

      for (i = 0; i < max; i++)

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  */
      /* This would perhaps not be valid if we were dealing with named
	 arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)

  /* Did we get a long long offset?  If so, punt.  */
  if (TREE_INT_CST_HIGH (offset_node) != 0)

    offset = TREE_INT_CST_LOW (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");

    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;

{

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES

    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
#endif
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */
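/* Editor's sketch (not from the original source): the buffer layout that
   expand_builtin_setjmp establishes below, word by word (Pmode-sized):

     buf[0]   frame pointer of the caller (BUILTIN_SETJMP_FRAME_VALUE)
     buf[1]   label to resume at (lab1, the receiver label)
     buf[2..] machine-dependent stack save area
	      (filled by emit_stack_save with SAVE_NONLOCAL)

   A matching __builtin_longjmp restores the frame and stack pointers from
   the same slots and jumps through buf[1].  */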
8025 expand_builtin_setjmp (buf_addr
, target
, first_label
, next_label
)
8028 rtx first_label
, next_label
;
8030 rtx lab1
= gen_label_rtx ();
8031 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
8032 enum machine_mode value_mode
;
8035 value_mode
= TYPE_MODE (integer_type_node
);
8037 #ifdef POINTERS_EXTEND_UNSIGNED
8038 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
8041 buf_addr
= force_reg (Pmode
, buf_addr
);
8043 if (target
== 0 || GET_CODE (target
) != REG
8044 || REGNO (target
) < FIRST_PSEUDO_REGISTER
)
8045 target
= gen_reg_rtx (value_mode
);
8049 /* We store the frame pointer and the address of lab1 in the buffer
8050 and use the rest of it for the stack save area, which is
8051 machine-dependent. */
8053 #ifndef BUILTIN_SETJMP_FRAME_VALUE
8054 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8057 emit_move_insn (gen_rtx_MEM (Pmode
, buf_addr
),
8058 BUILTIN_SETJMP_FRAME_VALUE
);
8059 emit_move_insn (validize_mem
8060 (gen_rtx_MEM (Pmode
,
8061 plus_constant (buf_addr
,
8062 GET_MODE_SIZE (Pmode
)))),
8063 gen_rtx_LABEL_REF (Pmode
, lab1
));
8065 stack_save
= gen_rtx_MEM (sa_mode
,
8066 plus_constant (buf_addr
,
8067 2 * GET_MODE_SIZE (Pmode
)));
8068 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
8070 /* If there is further processing to do, do it. */
8071 #ifdef HAVE_builtin_setjmp_setup
8072 if (HAVE_builtin_setjmp_setup
)
8073 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
8076 /* Set TARGET to zero and branch to the first-time-through label. */
8077 emit_move_insn (target
, const0_rtx
);
8078 emit_jump_insn (gen_jump (first_label
));
8082 /* Tell flow about the strange goings on. */
8083 current_function_has_nonlocal_label
= 1;
8085 /* Clobber the FP when we get here, so we have to make sure it's
8086 marked as used by this function. */
8087 emit_insn (gen_rtx_USE (VOIDmode
, hard_frame_pointer_rtx
));
8089 /* Mark the static chain as clobbered here so life information
8090 doesn't get messed up for it. */
8091 emit_insn (gen_rtx_CLOBBER (VOIDmode
, static_chain_rtx
));
8093 /* Now put in the code to restore the frame pointer, and argument
8094 pointer, if needed. The code below is from expand_end_bindings
8095 in stmt.c; see detailed documentation there. */
8096 #ifdef HAVE_nonlocal_goto
8097 if (! HAVE_nonlocal_goto
)
8099 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
8101 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8102 if (fixed_regs
[ARG_POINTER_REGNUM
])
8104 #ifdef ELIMINABLE_REGS
8106 static struct elims
{int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
8108 for (i
= 0; i
< sizeof elim_regs
/ sizeof elim_regs
[0]; i
++)
8109 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
8110 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
8113 if (i
== sizeof elim_regs
/ sizeof elim_regs
[0])
8116 /* Now restore our arg pointer from the address at which it
8117 was saved in our stack frame.
8118 If there hasn't be space allocated for it yet, make
8120 if (arg_pointer_save_area
== 0)
8121 arg_pointer_save_area
8122 = assign_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0);
8123 emit_move_insn (virtual_incoming_args_rtx
,
8124 copy_to_reg (arg_pointer_save_area
));
8129 #ifdef HAVE_builtin_setjmp_receiver
8130 if (HAVE_builtin_setjmp_receiver
)
8131 emit_insn (gen_builtin_setjmp_receiver (lab1
));
8134 #ifdef HAVE_nonlocal_goto_receiver
8135 if (HAVE_nonlocal_goto_receiver
)
8136 emit_insn (gen_nonlocal_goto_receiver ());
8143 /* Set TARGET, and branch to the next-time-through label. */
8144 emit_move_insn (target
, const1_rtx
);
8145 emit_jump_insn (gen_jump (next_label
));
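
/* For illustration (a sketch, not compiler code): the buffer layout
   implied by the stores above, assuming a target where Pmode is the word
   size and the SAVE_NONLOCAL area fits in the remaining three words.  */
#if 0
struct builtin_setjmp_buf_layout
{
  void *frame;            /* word 0: BUILTIN_SETJMP_FRAME_VALUE */
  void *receiver_label;   /* word 1: address of lab1, the receiver */
  void *stack_save[3];    /* words 2..4: machine-dependent stack save area */
};
#endif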
void
expand_builtin_longjmp (buf_addr, value)
     rtx buf_addr, value;
{
  rtx fp, lab, stack;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif
  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  if (value != const1_rtx)
    abort ();

#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (lab);
        }
    }
}
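
/* For illustration (a sketch, not compiler code): the usage contract
   enforced above -- the second argument of __builtin_longjmp must be the
   constant 1, which is also what __builtin_setjmp returns on the second
   arrival.  */
#if 0
static void *exc_buf[5];

static int
catch_example (void)
{
  if (__builtin_setjmp (exc_buf) == 0)
    {
      __builtin_longjmp (exc_buf, 1);   /* never returns */
      return 0;                         /* not reached */
    }
  return 1;                             /* arrived via __builtin_longjmp */
}
#endif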
/* Get a MEM rtx for expression EXP which can be used in a string
   instruction (cmpstrsi, movstrsi, ..).  */
static rtx
get_memory_rtx (exp)
     tree exp;
{
  rtx mem;
  int is_aggregate;

  mem = gen_rtx_MEM (BLKmode,
                     memory_address (BLKmode,
                                     expand_expr (exp, NULL_RTX,
                                                  ptr_mode, EXPAND_SUM)));

  RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);

  /* Figure out the type of the object pointed to.  Set MEM_IN_STRUCT_P
     if the value is the address of a structure or if the expression is
     cast to a pointer to structure type.  */
  is_aggregate = 0;

  while (TREE_CODE (exp) == NOP_EXPR)
    {
      tree cast_type = TREE_TYPE (exp);
      if (TREE_CODE (cast_type) == POINTER_TYPE
          && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
        {
          is_aggregate = 1;
          break;
        }
      exp = TREE_OPERAND (exp, 0);
    }

  if (is_aggregate == 0)
    {
      tree type;

      if (TREE_CODE (exp) == ADDR_EXPR)
        /* If this is the address of an object, check whether the
           object is an array.  */
        type = TREE_TYPE (TREE_OPERAND (exp, 0));
      else
        type = TREE_TYPE (TREE_TYPE (exp));
      is_aggregate = AGGREGATE_TYPE_P (type);
    }

  MEM_IN_STRUCT_P (mem) = is_aggregate;
  return mem;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

#define CALLED_AS_BUILT_IN(NODE) \
   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
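
/* For illustration (a sketch, not compiler code): CALLED_AS_BUILT_IN
   distinguishes the spelling of the call, so when not optimizing only the
   explicit __builtin_ form is expanded inline below.  */
#if 0
extern void *memcpy ();

void
copy_example (char *d, const char *s)
{
  memcpy (d, s, 4);             /* library call when not optimizing */
  __builtin_memcpy (d, s, 4);   /* expanded inline even when not optimizing */
}
#endif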
static rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  rtx op0;
  rtx lab1, insns;
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
  optab builtin_optab;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
      /* Treat these like sqrt, but only if the user asks for them.  */
      if (! flag_fast_math)
        break;
    case BUILT_IN_FSQRT:
      /* If not optimizing, call the library function.  */
      if (! optimize)
        break;

      if (arglist == 0
          /* Arg could be wrong type if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
        break;

      /* Stabilize and compute the argument.  */
      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
          && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
        {
          exp = copy_node (exp);
          arglist = copy_node (arglist);
          TREE_OPERAND (exp, 1) = arglist;
          TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
        }
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

      /* Make a suitable register to place result in.  */
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      emit_queue ();
      start_sequence ();

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_SIN:
          builtin_optab = sin_optab; break;
        case BUILT_IN_COS:
          builtin_optab = cos_optab; break;
        case BUILT_IN_FSQRT:
          builtin_optab = sqrt_optab; break;
        default:
          abort ();
        }

      /* Compute into TARGET.
         Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
                            builtin_optab, op0, target, 0);

      /* If we were unable to expand via the builtin, stop the
         sequence (without outputting the insns) and break, causing
         a call to the library function.  */
      if (target == 0)
        {
          end_sequence ();
          break;
        }

      /* Check the results by default.  But if flag_fast_math is turned on,
         then assume sqrt will always be called with valid arguments.  */
      if (! flag_fast_math)
        {
          /* Don't define the builtin FP instructions
             if your machine is not IEEE.  */
          if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
            abort ();

          lab1 = gen_label_rtx ();

          /* Test the result; if it is NaN, set errno=EDOM because
             the argument was not in the domain.  */
          emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
          emit_jump_insn (gen_beq (lab1));

#ifdef TARGET_EDOM
          {
#ifdef GEN_ERRNO_RTX
            rtx errno_rtx = GEN_ERRNO_RTX;
#else
            rtx errno_rtx
              = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif

            emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
          }
#else
          /* We can't set errno=EDOM directly; let the library call do it.
             Pop the arguments right away in case the call gets deleted.  */
          NO_DEFER_POP;
          expand_call (exp, target, 0);
          OK_DEFER_POP;
#endif

          emit_label (lab1);
        }

      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insns (insns);

      return target;
      /* __builtin_apply_args returns block of memory allocated on
         the stack into which is stored the arg pointer, structure
         value address, static chain, and all the registers that might
         possibly be used in performing a function call.  The code is
         moved to the start of the function so the incoming values are
         saved.  */
    case BUILT_IN_APPLY_ARGS:
      /* Don't do __builtin_apply_args more than once in a function.
         Save the result of the first call and reuse it.  */
      if (apply_args_value != 0)
        return apply_args_value;
      {
        /* When this function is called, it means that registers must be
           saved on entry to this function.  So we migrate the
           call to the first insn of this function.  */
        rtx temp;
        rtx seq;

        start_sequence ();
        temp = expand_builtin_apply_args ();
        seq = get_insns ();
        end_sequence ();

        apply_args_value = temp;

        /* Put the sequence after the NOTE that starts the function.
           If this is inside a SEQUENCE, make the outer-level insn
           chain current, so the code is placed at the start of the
           function.  */
        push_topmost_sequence ();
        emit_insns_before (seq, NEXT_INSN (get_insns ()));
        pop_topmost_sequence ();
        return temp;
      }
      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
         FUNCTION with a copy of the parameters described by
         ARGUMENTS, and ARGSIZE.  It returns a block of memory
         allocated on the stack into which is stored all the registers
         that might possibly be used for returning the result of a
         function.  ARGUMENTS is the value returned by
         __builtin_apply_args.  ARGSIZE is the number of bytes of
         arguments that must be copied.  ??? How should this value be
         computed?  We'll also need a safe worst case value for varargs
         functions.  */
    case BUILT_IN_APPLY:
      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
        return const0_rtx;
      else
        {
          int i;
          tree t;
          rtx ops[3];

          for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
            ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

          return expand_builtin_apply (ops[0], ops[1], ops[2]);
        }

      /* __builtin_return (RESULT) causes the function to return the
         value described by RESULT.  RESULT is address of the block of
         memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (arglist
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
        expand_builtin_return (expand_expr (TREE_VALUE (arglist),
                                            NULL_RTX, VOIDmode, 0));
      return const0_rtx;
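
      /* For illustration (not compiler code): the intended use of the
         three untyped-call builtins is argument forwarding, e.g.

             void *args = __builtin_apply_args ();
             void *res = __builtin_apply ((void (*) ()) f, args, 64);
             __builtin_return (res);

         where 64 is a hypothetical worst-case ARGSIZE; as the ???
         comment above notes, how to compute it is unresolved.  */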
    case BUILT_IN_SAVEREGS:
      /* Don't do __builtin_saveregs more than once in a function.
         Save the result of the first call and reuse it.  */
      if (saveregs_value != 0)
        return saveregs_value;
      {
        /* When this function is called, it means that registers must be
           saved on entry to this function.  So we migrate the
           call to the first insn of this function.  */
        rtx temp;
        rtx seq;

        /* Now really call the function.  `expand_call' does not call
           expand_builtin, so there is no danger of infinite recursion here.  */
        start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
        /* Do whatever the machine needs done in this case.  */
        temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
        /* The register where the function returns its value
           is likely to have something else in it, such as an argument.
           So preserve that register around the call.  */

        if (value_mode != VOIDmode)
          {
            rtx valreg = hard_libcall_value (value_mode);
            rtx saved_valreg = gen_reg_rtx (value_mode);

            emit_move_insn (saved_valreg, valreg);
            temp = expand_call (exp, target, ignore);
            emit_move_insn (valreg, saved_valreg);
          }
        else
          /* Generate the call, putting the value in a pseudo.  */
          temp = expand_call (exp, target, ignore);
#endif

        seq = get_insns ();
        end_sequence ();

        saveregs_value = temp;

        /* Put the sequence after the NOTE that starts the function.
           If this is inside a SEQUENCE, make the outer-level insn
           chain current, so the code is placed at the start of the
           function.  */
        push_topmost_sequence ();
        emit_insns_before (seq, NEXT_INSN (get_insns ()));
        pop_topmost_sequence ();
        return temp;
      }
      /* __builtin_args_info (N) returns word N of the arg space info
         for the current function.  The number and meanings of words
         is controlled by the definition of CUMULATIVE_ARGS.  */
    case BUILT_IN_ARGS_INFO:
      {
        int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
        int *word_ptr = (int *) &current_function_args_info;
#if 0
        /* These are used by the code below that is if 0'ed away */
        int i;
        tree type, elts, result;
#endif

        if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
          fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
                 __FILE__, __LINE__);

        if (arglist != 0)
          {
            tree arg = TREE_VALUE (arglist);
            if (TREE_CODE (arg) != INTEGER_CST)
              error ("argument of `__builtin_args_info' must be constant");
            else
              {
                int wordnum = TREE_INT_CST_LOW (arg);

                if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
                  error ("argument of `__builtin_args_info' out of range");
                else
                  return GEN_INT (word_ptr[wordnum]);
              }
          }
        else
          error ("missing argument in `__builtin_args_info'");

        return const0_rtx;

#if 0
        for (i = 0; i < nwords; i++)
          elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));

        type = build_array_type (integer_type_node,
                                 build_index_type (build_int_2 (nwords, 0)));
        result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
        TREE_CONSTANT (result) = 1;
        TREE_STATIC (result) = 1;
        result = build (INDIRECT_REF, build_pointer_type (type), result);
        TREE_CONSTANT (result) = 1;
        return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
#endif
      }
      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      {
        tree fntype = TREE_TYPE (current_function_decl);

        if ((TYPE_ARG_TYPES (fntype) == 0
             || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
                 == void_type_node))
            && ! current_function_varargs)
          {
            error ("`va_start' used in function with fixed args");
            return const0_rtx;
          }

        if (arglist)
          {
            tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
            tree arg = TREE_VALUE (arglist);

            /* Strip off all nops for the sake of the comparison.  This
               is not quite the same as STRIP_NOPS.  It does more.
               We must also strip off INDIRECT_EXPR for C++ reference
               parameters.  */
            while (TREE_CODE (arg) == NOP_EXPR
                   || TREE_CODE (arg) == CONVERT_EXPR
                   || TREE_CODE (arg) == NON_LVALUE_EXPR
                   || TREE_CODE (arg) == INDIRECT_REF)
              arg = TREE_OPERAND (arg, 0);
            if (arg != last_parm)
              warning ("second parameter of `va_start' not last named argument");
          }
        else if (! current_function_varargs)
          /* Evidently an out of date version of <stdarg.h>; can't validate
             va_start's second argument, but can still work as intended.  */
          warning ("`__builtin_next_arg' called without an argument");
      }

      return expand_binop (Pmode, add_optab,
                           current_function_internal_arg_pointer,
                           current_function_arg_offset_rtx,
                           NULL_RTX, 0, OPTAB_LIB_WIDEN);
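
      /* For illustration (not compiler code): a traditional <stdarg.h>
         reaches this case via a definition along the lines of

             #define va_start(AP, LASTARG) \
               ((AP) = (char *) __builtin_next_arg (LASTARG))

         (the exact definition is target-dependent).  Passing anything
         but the last named parameter draws the warning above.  */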
    case BUILT_IN_CLASSIFY_TYPE:
      if (arglist != 0)
        {
          tree type = TREE_TYPE (TREE_VALUE (arglist));
          enum tree_code code = TREE_CODE (type);
          if (code == VOID_TYPE)
            return GEN_INT (void_type_class);
          if (code == INTEGER_TYPE)
            return GEN_INT (integer_type_class);
          if (code == CHAR_TYPE)
            return GEN_INT (char_type_class);
          if (code == ENUMERAL_TYPE)
            return GEN_INT (enumeral_type_class);
          if (code == BOOLEAN_TYPE)
            return GEN_INT (boolean_type_class);
          if (code == POINTER_TYPE)
            return GEN_INT (pointer_type_class);
          if (code == REFERENCE_TYPE)
            return GEN_INT (reference_type_class);
          if (code == OFFSET_TYPE)
            return GEN_INT (offset_type_class);
          if (code == REAL_TYPE)
            return GEN_INT (real_type_class);
          if (code == COMPLEX_TYPE)
            return GEN_INT (complex_type_class);
          if (code == FUNCTION_TYPE)
            return GEN_INT (function_type_class);
          if (code == METHOD_TYPE)
            return GEN_INT (method_type_class);
          if (code == RECORD_TYPE)
            return GEN_INT (record_type_class);
          if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
            return GEN_INT (union_type_class);
          if (code == ARRAY_TYPE)
            {
              if (TYPE_STRING_FLAG (type))
                return GEN_INT (string_type_class);
              else
                return GEN_INT (array_type_class);
            }
          if (code == SET_TYPE)
            return GEN_INT (set_type_class);
          if (code == FILE_TYPE)
            return GEN_INT (file_type_class);
          if (code == LANG_TYPE)
            return GEN_INT (lang_type_class);
        }
      return GEN_INT (no_type_class);
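
      /* For illustration (not compiler code): with the mapping above,
         __builtin_classify_type (0) yields integer_type_class,
         __builtin_classify_type (0.0) yields real_type_class, and
         __builtin_classify_type ((void *) 0) yields pointer_type_class.  */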
    case BUILT_IN_CONSTANT_P:
      if (arglist == 0)
        return const0_rtx;
      else
        {
          tree arg = TREE_VALUE (arglist);

          STRIP_NOPS (arg);
          if (really_constant_p (arg)
              || (TREE_CODE (arg) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
            return const1_rtx;

          /* Only emit CONSTANT_P_RTX if CSE will be run.
             Moreover, we don't want to expand trees that have side effects,
             as the original __builtin_constant_p did not evaluate its
             argument at all, and we would break existing usage by changing
             this.  This quirk was generally useful, eliminating a bit of hair
             in the writing of the macros that use this function.  Now the
             same thing can be better accomplished in an inline function.  */

          if (! cse_not_expected && ! TREE_SIDE_EFFECTS (arg))
            {
              /* Lazy fixup of old code: issue a warning and fail the test.  */
              if (! can_handle_constant_p)
                {
                  warning ("Delayed evaluation of __builtin_constant_p not supported on this target.");
                  warning ("Please report this as a bug to egcs-bugs@cygnus.com.");
                  return const0_rtx;
                }
              return gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
                                             expand_expr (arg, NULL_RTX,
                                                          VOIDmode, 0));
            }

          return const0_rtx;
        }
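
      /* For illustration (not compiler code): given the rules above,
         __builtin_constant_p (4 * 4) is 1, __builtin_constant_p ("abc")
         is 1 (the address of a STRING_CST), and __builtin_constant_p (x++)
         is 0 with x left unevaluated, since side effects are refused.  */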
    case BUILT_IN_FRAME_ADDRESS:
      /* The argument must be a nonnegative integer constant.
         It counts the number of frames to scan up the stack.
         The value is the address of that frame.  */
    case BUILT_IN_RETURN_ADDRESS:
      /* The argument must be a nonnegative integer constant.
         It counts the number of frames to scan up the stack.
         The value is the return address saved in that frame.  */
      if (arglist == 0)
        /* Warning about missing arg was already issued.  */
        return const0_rtx;
      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
               || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
        {
          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
            error ("invalid arg to `__builtin_frame_address'");
          else
            error ("invalid arg to `__builtin_return_address'");
          return const0_rtx;
        }
      else
        {
          rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
                                                TREE_INT_CST_LOW (TREE_VALUE (arglist)),
                                                hard_frame_pointer_rtx);

          /* Some ports cannot access arbitrary stack frames.  */
          if (tem == NULL)
            {
              if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
                warning ("unsupported arg to `__builtin_frame_address'");
              else
                warning ("unsupported arg to `__builtin_return_address'");
              return const0_rtx;
            }

          /* For __builtin_frame_address, return what we've got.  */
          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
            return tem;

          if (GET_CODE (tem) != REG)
            tem = copy_to_reg (tem);
          return tem;
        }
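
      /* For illustration (not compiler code): __builtin_return_address (0)
         and __builtin_frame_address (1) are accepted, since the frame
         count is a nonnegative INTEGER_CST; a variable or negative count
         draws the "invalid arg" error above.  */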
      /* Returns the address of the area where the structure is returned.
         0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (arglist != 0
          || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
          || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
        return const0_rtx;
      else
        return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
      if (arglist == 0
          /* Arg could be non-integer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
        break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

      /* Allocate the desired space.  */
      return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
    case BUILT_IN_FFS:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-integer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
        break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
      /* Compute ffs, into TARGET if possible.
         Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
                            ffs_optab, op0, target, 1);
      if (target == 0)
        abort ();
      return target;
    case BUILT_IN_STRLEN:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
        break;
      else
        {
          tree src = TREE_VALUE (arglist);
          tree len = c_strlen (src);

          int align
            = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

          rtx result, src_rtx, char_rtx;
          enum machine_mode insn_mode = value_mode, char_mode;
          enum insn_code icode;

          /* If the length is known, just return it.  */
          if (len != 0)
            return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);

          /* If SRC is not a pointer type, don't do this operation inline.  */
          if (align == 0)
            break;

          /* Call a function if we can't compute strlen in the right mode.  */
          while (insn_mode != VOIDmode)
            {
              icode = strlen_optab->handlers[(int) insn_mode].insn_code;
              if (icode != CODE_FOR_nothing)
                break;

              insn_mode = GET_MODE_WIDER_MODE (insn_mode);
            }
          if (insn_mode == VOIDmode)
            break;

          /* Make a place to write the result of the instruction.  */
          result = target;
          if (! (result != 0
                 && GET_CODE (result) == REG
                 && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          /* Make sure the operands are acceptable to the predicates.  */

          if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
            result = gen_reg_rtx (insn_mode);
          src_rtx = memory_address (BLKmode,
                                    expand_expr (src, NULL_RTX, ptr_mode,
                                                 EXPAND_NORMAL));

          if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
            src_rtx = copy_to_mode_reg (Pmode, src_rtx);

          /* Check the string is readable and has an end.  */
          if (flag_check_memory_usage)
            emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
                               src_rtx, ptr_mode,
                               GEN_INT (MEMORY_USE_RO),
                               TYPE_MODE (integer_type_node));

          char_rtx = const0_rtx;
          char_mode = insn_operand_mode[(int)icode][2];
          if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
            char_rtx = copy_to_mode_reg (char_mode, char_rtx);

          emit_insn (GEN_FCN (icode) (result,
                                      gen_rtx_MEM (BLKmode, src_rtx),
                                      char_rtx, GEN_INT (align)));

          /* Return the value in the proper mode for this function.  */
          if (GET_MODE (result) == value_mode)
            return result;
          else if (target != 0)
            {
              convert_move (target, result, 0);
              return target;
            }
          else
            return convert_to_mode (value_mode, result, 0);
        }
    case BUILT_IN_STRCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
        break;
      else
        {
          tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

          if (len == 0)
            break;

          len = size_binop (PLUS_EXPR, len, integer_one_node);

          chainon (arglist, build_tree_list (NULL_TREE, len));
        }
    case BUILT_IN_MEMCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
              != POINTER_TYPE)
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || (TREE_CODE (TREE_TYPE (TREE_VALUE
                                    (TREE_CHAIN (TREE_CHAIN (arglist)))))
              != INTEGER_TYPE))
        break;
      else
        {
          tree dest = TREE_VALUE (arglist);
          tree src = TREE_VALUE (TREE_CHAIN (arglist));
          tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

          int src_align
            = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          int dest_align
            = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          rtx dest_mem, src_mem, dest_addr, len_rtx;

          /* If either SRC or DEST is not a pointer type, don't do
             this operation in-line.  */
          if (src_align == 0 || dest_align == 0)
            {
              if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
                TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
              break;
            }

          dest_mem = get_memory_rtx (dest);
          src_mem = get_memory_rtx (src);
          len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);

          /* Just copy the rights of SRC to the rights of DEST.  */
          if (flag_check_memory_usage)
            emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                               XEXP (dest_mem, 0), ptr_mode,
                               XEXP (src_mem, 0), ptr_mode,
                               len_rtx, TYPE_MODE (sizetype));

          /* Copy word part most expediently.  */
          dest_addr
            = emit_block_move (dest_mem, src_mem, len_rtx,
                               MIN (src_align, dest_align));

          if (dest_addr == 0)
            dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);

          return dest_addr;
        }
    case BUILT_IN_MEMSET:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
              != INTEGER_TYPE)
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || (INTEGER_TYPE
              != (TREE_CODE (TREE_TYPE
                             (TREE_VALUE
                              (TREE_CHAIN (TREE_CHAIN (arglist))))))))
        break;
      else
        {
          tree dest = TREE_VALUE (arglist);
          tree val = TREE_VALUE (TREE_CHAIN (arglist));
          tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

          int dest_align
            = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          rtx dest_mem, dest_addr, len_rtx;

          /* If DEST is not a pointer type, don't do this
             operation in-line.  */
          if (dest_align == 0)
            break;

          /* If the arguments have side-effects, then we can only evaluate
             them at most once.  The following code evaluates them twice if
             they are not constants because we break out to expand_call
             in that case.  They can't be constants if they have side-effects
             so we can check for that first.  Alternatively, we could call
             save_expr to make multiple evaluation safe.  */
          if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
            break;

          /* If VAL is not 0, don't do this operation in-line.  */
          if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
            break;

          /* If LEN does not expand to a constant, don't do this
             operation in-line.  */
          len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
          if (GET_CODE (len_rtx) != CONST_INT)
            break;

          dest_mem = get_memory_rtx (dest);

          /* Just check DST is writable and mark it as readable.  */
          if (flag_check_memory_usage)
            emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                               XEXP (dest_mem, 0), ptr_mode,
                               len_rtx, TYPE_MODE (sizetype),
                               GEN_INT (MEMORY_USE_WO),
                               TYPE_MODE (integer_type_node));

          dest_addr = clear_storage (dest_mem, len_rtx, dest_align);

          if (dest_addr == 0)
            dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);

          return dest_addr;
        }
      /* These comparison functions need an instruction that returns an actual
         index.  An ordinary compare that just sets the condition codes
         is not enough.  */
#ifdef HAVE_cmpstrsi
    case BUILT_IN_STRCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      /* If we need to check memory accesses, call the library function.  */
      if (flag_check_memory_usage)
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
        break;
      else if (!HAVE_cmpstrsi)
        break;
      else
        {
          tree arg1 = TREE_VALUE (arglist);
          tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
          tree len, len2;

          len = c_strlen (arg1);
          if (len)
            len = size_binop (PLUS_EXPR, integer_one_node, len);
          len2 = c_strlen (arg2);
          if (len2)
            len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

          /* If we don't have a constant length for the first, use the length
             of the second, if we know it.  We don't require a constant for
             this case; some cost analysis could be done if both are available
             but neither is constant.  For now, assume they're equally cheap.

             If both strings have constant lengths, use the smaller.  This
             could arise if optimization results in strcpy being called with
             two fixed strings, or if the code was machine-generated.  We should
             add some code to the `memcmp' handler below to deal with such
             situations, someday.  */
          if (!len || TREE_CODE (len) != INTEGER_CST)
            {
              if (len2)
                len = len2;
              else if (len == 0)
                break;
            }
          else if (len2 && TREE_CODE (len2) == INTEGER_CST)
            {
              if (tree_int_cst_lt (len2, len))
                len = len2;
            }

          chainon (arglist, build_tree_list (NULL_TREE, len));
        }
    case BUILT_IN_MEMCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      /* If we need to check memory accesses, call the library function.  */
      if (flag_check_memory_usage)
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
        break;
      else if (!HAVE_cmpstrsi)
        break;
      else
        {
          tree arg1 = TREE_VALUE (arglist);
          tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
          tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
          rtx result;

          int arg1_align
            = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          int arg2_align
            = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          enum machine_mode insn_mode
            = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];

          /* If we don't have POINTER_TYPE, call the function.  */
          if (arg1_align == 0 || arg2_align == 0)
            {
              if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
                TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
              break;
            }

          /* Make a place to write the result of the instruction.  */
          result = target;
          if (! (result != 0
                 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
                                   get_memory_rtx (arg2),
                                   expand_expr (len, NULL_RTX, VOIDmode, 0),
                                   GEN_INT (MIN (arg1_align, arg2_align))));

          /* Return the value in the proper mode for this function.  */
          mode = TYPE_MODE (TREE_TYPE (exp));
          if (GET_MODE (result) == mode)
            return result;
          else if (target != 0)
            {
              convert_move (target, result, 0);
              return target;
            }
          else
            return convert_to_mode (mode, result, 0);
        }
#else
    case BUILT_IN_STRCMP:
    case BUILT_IN_MEMCMP:
      break;
#endif

    case BUILT_IN_SETJMP:
      if (arglist == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
        break;
      else
        {
          rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
                                      VOIDmode, 0);
          rtx lab = gen_label_rtx ();
          rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
          emit_label (lab);
          return ret;
        }
      /* __builtin_longjmp is passed a pointer to an array of five words.
         It's similar to the C library longjmp function but works with
         __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (arglist == 0 || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
        break;
      else
        {
          rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
                                      VOIDmode, 0);
          rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
                                   NULL_RTX, VOIDmode, 0);

          if (value != const1_rtx)
            {
              error ("__builtin_longjmp second argument must be 1");
              return const0_rtx;
            }

          expand_builtin_longjmp (buf_addr, value);
          return const0_rtx;
        }

    case BUILT_IN_TRAP:
#ifdef HAVE_trap
      if (HAVE_trap)
        emit_insn (gen_trap ());
      else
#endif
        error ("__builtin_trap not supported by this target");
      break;
      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_FP:
      return frame_pointer_rtx;
    case BUILT_IN_SP:
      return stack_pointer_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_FP_REGNUM:
      return expand_builtin_dwarf_fp_regnum ();
    case BUILT_IN_DWARF_REG_SIZE:
      return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_SET_RETURN_ADDR_REG:
      expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
      return const0_rtx;
    case BUILT_IN_EH_STUB_OLD:
      return expand_builtin_eh_stub_old ();
    case BUILT_IN_EH_STUB:
      return expand_builtin_eh_stub ();
    case BUILT_IN_SET_EH_REGS:
      expand_builtin_set_eh_regs (TREE_VALUE (arglist),
                                  TREE_VALUE (TREE_CHAIN (arglist)));
      return const0_rtx;

    default:			/* just do library call, if unknown builtin */
      error ("built-in function `%s' not currently supported",
             IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];

/* Return the offset of register REGNO into the block returned by
   __builtin_apply_args.  This is not declared static, since it is
   needed in objc-act.c.  */
int
apply_args_register_offset (regno)
     int regno;
{
  apply_args_size ();

  /* Arguments are always put in outgoing registers (in the argument
     block) if such make sense.  */
#ifdef OUTGOING_REGNO
  regno = OUTGOING_REGNO(regno);
#endif
  return apply_args_reg_offset[regno];
}
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (struct_value_rtx)
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode != VOIDmode;
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode)
                  && HARD_REGNO_NREGS (regno, mode) == 1)
                best_mode = mode;

            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && (mov_optab->handlers[(int) mode].insn_code
                        != CODE_FOR_nothing))
                  best_mode = mode;

            mode = best_mode;
            if (mode == VOIDmode)
              abort ();

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            apply_args_reg_offset[regno] = size;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
            apply_args_reg_offset[regno] = 0;
          }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_VALUE_REGNO_P (regno))
          {
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode != VOIDmode;
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode))
                best_mode = mode;

            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && (mov_optab->handlers[(int) mode].insn_code
                        != CODE_FOR_nothing))
                  best_mode = mode;

            mode = best_mode;
            if (mode == VOIDmode)
              abort ();

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = change_address (result, mode,
                              plus_constant (XEXP (result, 0), size));
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        rtx tem;

        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        /* For reg-stack.c's stack register household.
           Compare with a similar piece of code in function.c.  */

        emit_insn (gen_rtx_USE (mode, tem));

        emit_move_insn (change_address (registers, mode,
                                        plus_constant (XEXP (registers, 0),
                                                       size)),
                        tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
                  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
                                      plus_constant (XEXP (registers, 0),
                                                     size)),
                      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
                  gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
                                incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros effect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, 0);
  emit_block_move (gen_rtx_MEM (BLKmode, dest),
                   gen_rtx_MEM (BLKmode, incoming_args),
                   argsize,
                   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg,
                        change_address (arguments, mode,
                                        plus_constant (XEXP (arguments, 0),
                                                       size)));

        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
                      change_address (arguments, Pmode,
                                      plus_constant (XEXP (arguments, 0),
                                                     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
        use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            if (valreg)
              abort (); /* HAVE_untyped_call required.  */
            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (gen_call_value (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
                                      XEXP (result, 0)),
                      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
/* Perform an untyped return.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg,
                        change_address (result, mode,
                                        plus_constant (XEXP (result, 0),
                                                       size)));

        push_to_sequence (call_fusage);
        emit_insn (gen_rtx_USE (VOIDmode, reg));
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     register tree exp;
     int post, ignore;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
          && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
              || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
        SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
        bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
           && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
         post-incrementing, get a copy of the old value.  Otherwise,
         just mark that we cannot increment in place.  */
      if (post)
        op0 = copy_to_reg (op0);
      else
        bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
                 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
                     EXPAND_MEMORY_USE_BAD);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_operand_predicate[icode][0]) (op0, mode)
          && (*insn_operand_predicate[icode][1]) (op0, mode)
          && (*insn_operand_predicate[icode][2]) (op1, mode))
        single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
         Problems with multiple evaluation of INCREMENTED are prevented
         because either (1) it is a component_ref or preincrement,
         in which case it was stabilized above, or (2) it is an array_ref
         with constant index in an array in a register, which is
         safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
                             || TREE_CODE (exp) == PREDECREMENT_EXPR)
                            ? MINUS_EXPR : PLUS_EXPR),
                           TREE_TYPE (exp),
                           incremented,
                           TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
             || TREE_CODE (incremented) == CONVERT_EXPR)
        {
          newexp = convert (TREE_TYPE (incremented), newexp);
          incremented = TREE_OPERAND (incremented, 0);
        }

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
         If there is an insn to add or subtract in this mode, queue it.
         Queueing the increment insn avoids the register shuffling
         that often results if we must increment now and first save
         the old value for subsequent use.  */

#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_operand_predicate[icode][0]) (op0, mode)
          && (*insn_operand_predicate[icode][1]) (op0, mode))
        {
          if (! (*insn_operand_predicate[icode][2]) (op1, mode))
            op1 = force_reg (mode, op1);

          return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
        }
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
        {
          rtx addr = (general_operand (XEXP (op0, 0), mode)
                      ? force_reg (Pmode, XEXP (op0, 0))
                      : copy_to_reg (XEXP (op0, 0)));
          rtx temp, result;

          op0 = change_address (op0, VOIDmode, addr);
          temp = force_reg (GET_MODE (op0), op0);
          if (! (*insn_operand_predicate[icode][2]) (op1, mode))
            op1 = force_reg (mode, op1);

          /* The increment queue is LIFO, thus we have to `queue'
             the instructions in reverse order.  */
          enqueue_insn (op0, gen_move_insn (op0, temp));
          result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
          return result;
        }
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1,
                      flag_check_memory_usage ? NULL_RTX : op0,
                      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
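
/* For illustration (a sketch, not compiler code): the distinction the
   POST argument encodes above.  */
#if 0
static int
increment_example (int *p)
{
  int a = (*p)++;   /* post = 1: the pre-increment value is returned */
  int b = ++(*p);   /* post = 0: the incremented value is returned */
  return a + b;
}
#endif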
/* Expand all function calls contained within EXP, innermost ones first.
   But don't look within expressions that have sequence points.
   For each CALL_EXPR, record the rtx for its value
   in the CALL_EXPR_RTL field.  */

static void
preexpand_calls (exp)
     tree exp;
{
  register int nops, i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));

  if (! do_preexpand_calls)
    return;

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0
          /* Do nothing if the call returns a variable-sized object.  */
          || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          /* Do nothing to built-in functions.  */
          || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
              && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                  == FUNCTION_DECL)
              && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
        return;

      CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case COMPOUND_EXPR:
    case COND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
         the adjust will be done for it (since it makes jumps).
         Do it now, so that if this is inside an argument
         of a function, we don't get the stack adjustment
         after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case BLOCK:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TRY_CATCH_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return;

    default:
      break;
    }

  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
        type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
        if (type == 'e' || type == '<' || type == '1' || type == '2'
            || type == 'r')
          preexpand_calls (TREE_OPERAND (exp, i));
      }
}
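
/* Illustrative sketch (editor's note, not part of the original source):
   for `a = f (b) + g (c)', preexpand_calls on the PLUS_EXPR recurses into
   both operands and expands the calls to F and G first, recording their
   result rtx in CALL_EXPR_RTL, so that neither call's argument pushing
   can interleave with the expansion of the addition itself.  */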
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}
/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
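
/* Illustrative sketch (editor's note, not part of the original source):
   after two calls that each pushed 8 bytes of arguments and did not pop
   them, pending_stack_adjust is 16; the next do_pending_stack_adjust ()
   then emits a single adjust_stack for all 16 bytes rather than one pop
   per call, provided inhibit_defer_pop is zero.  */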
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
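
/* Usage sketch (editor's note, not part of the original source): a
   statement expander for `while (COND) BODY' might emit

       jumpifnot (cond_tree, exit_label);

   which evaluates COND and branches to EXIT_LABEL only when it is zero,
   falling through into the loop body otherwise.  */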
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  rtx comparison = 0;
  int i;
  tree type;
  enum machine_mode mode;

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
                                   TREE_OPERAND (exp, 0),
                                   TREE_OPERAND (exp, 1)),
                            NE, NE);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;
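
      /* Worked example (editor's sketch, not from the original source):
         for `signed char c; ... if (c & 0x80)', the AND constant fits in
         8 bits (floor_log2 (0x80) == 7, so I + 1 == 8), MODE becomes
         QImode, and the test is re-expanded as a QImode comparison,
         probing the sign bit of C directly instead of first widening C
         to the int mode of the BIT_AND_EXPR.  */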
    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;
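
      /* Worked example (editor's sketch, not from the original source):
         for `if (a && b)' do_jump receives the TRUTH_ANDIF_EXPR with both
         labels set and the case above emits

             do_jump (a, if_false_label, NULL_RTX);   A zero: fail at once
             do_jump (b, if_false_label, if_true_label);

         so B is never evaluated when A is zero, which is exactly the C
         short-circuit rule; TRUTH_ORIF_EXPR is the mirror image.  */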
    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
        int bitsize, bitpos, unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;
        int alignment;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep,
                             &alignment);

        type = type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }
    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          register rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();

          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

          start_cleanup_deferral ();
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);

          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          end_cleanup_deferral ();
        }
      break;
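
      /* Worked example (editor's sketch, not from the original source):
         `(a ? 1 : 0)' jumps exactly as `a' itself, and `(a ? 0 : 1)' as
         `!a', with no label or join point at all; only the general
         `(a ? b : c)' form needs LABEL1 and the drop-through label
         wired up as above.  */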
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                      fold (build (EQ_EXPR, TREE_TYPE (exp),
                                   fold (build1 (REALPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp0)),
                                   fold (build1 (REALPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp1)))),
                      fold (build (EQ_EXPR, TREE_TYPE (exp),
                                   fold (build1 (IMAGPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp0)),
                                   fold (build1 (IMAGPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp1))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          comparison = compare (exp, EQ, EQ);
        break;
      }
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                      fold (build (NE_EXPR, TREE_TYPE (exp),
                                   fold (build1 (REALPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp0)),
                                   fold (build1 (REALPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp1)))),
                      fold (build (NE_EXPR, TREE_TYPE (exp),
                                   fold (build1 (IMAGPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp0)),
                                   fold (build1 (IMAGPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp1))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          comparison = compare (exp, NE, NE);
        break;
      }
    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        comparison = compare (exp, GE, GEU);
      break;

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
        comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
        comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && !can_compare_p (GET_MODE (temp)))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
                                       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                       GET_MODE (temp), NULL_RTX, 0);
      else
        abort ();
    }
  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
        emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
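
/* Flow sketch (editor's note, not part of the original source): for
   `if (x < y)' on a machine that can compare SImode directly, the
   LT_EXPR case sets COMPARISON to an (lt (cc0) (const_int 0)) rtx via
   compare (), and the tail above hands it to do_jump_for_compare, which
   emits the conditional branch; the by-parts routines below are used
   only when can_compare_p fails for the operand mode.  */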
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
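
/* Worked example (editor's sketch, not from the original source): for
   `long long a, b; ... if (a > b)' on a 32-bit target (nwords == 2),
   the loop above emits, high word first:

       if (a.hi >  b.hi) goto if_true_label;     signed compare
       if (a.hi != b.hi) goto if_false_label;
       if (a.lo >  b.lo) goto if_true_label;     unsigned compare
       if (a.lo != b.lo) goto if_false_label;
       goto if_false_label;                      all words equal

   Only the highest word is compared with the signed code.  */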
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
                                   NULL_RTX, 0);

      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp == const0_rtx)
        emit_jump (if_true_label);
      else
        do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
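
/* Worked example (editor's sketch, not from the original source): testing
   `long long x; if (x)' on a 32-bit target ORs the two SImode halves into
   one scratch register:

       part = x.word0;
       part |= x.word1;

   so a single word-mode compare of PART against zero decides the jump,
   instead of two separate compare-and-branch pairs.  */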
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
         case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
        insn = get_insns ();
      else if (INSN_DELETED_P (prev))
        abort ();
      else
        insn = NEXT_INSN (prev);

      for (; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
        {
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
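
/* Worked example (editor's sketch, not from the original source): a call
   such as

       compare_from_rtx (const_int 4, reg, GT, 0, SImode, NULL_RTX, 0)

   swaps the operands so the constant comes second and rewrites the code
   with swap_condition, yielding the equivalent `reg LT 4' before the
   compare insn is emitted.  */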
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
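
  /* Worked example (editor's sketch, not from the original source):
     `(x & 8) != 0' has a power-of-two mask, so the block above expands
     it as

         t = x >> 3;          shift bit 3 to the low-order position
         t = t & 1;           mask down to one bit

     with an XOR against 1 inserted instead when the test was `== 0';
     no scc instruction is needed at all.  */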
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
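
/* Fallback sketch (editor's note, not part of the original source): when
   emit_store_flag fails, the tail above simulates it, e.g. for
   TARGET = (A < B):

       target = 1;
       if (a < b) goto label;
       target = 0;
     label:

   with the two constants exchanged when INVERT is set.  */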
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
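
/* Worked example (editor's sketch, not from the original source): for a
   dense `switch (i)' with cases 4..7, the caller first emits
   `index = i - 4'; do_tablejump then compares INDEX against RANGE = 3
   with an unsigned GTU, so both i < 4 and i > 7 reach DEFAULT_LABEL via
   a single branch, and the surviving index is scaled by
   GET_MODE_SIZE (CASE_VECTOR_MODE) to address the dispatch table.  */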
#endif /* HAVE_tablejump */