/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
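
/* For example, CEIL rounds a division up: CEIL (10, 4) == 3, since
   ten bytes occupy three 4-byte units, where plain integer division
   would give 2.  */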
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Postincrements that still need to be expanded.  */
static rtx pending_chain;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to, to_addr;
  int autinc_to, explicit_inc_to, to_struct;
  rtx from, from_addr;
  int autinc_from, explicit_inc_from, from_struct;
  int len, offset, reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */
struct clear_by_pieces
{
  rtx to, to_addr;
  int autinc_to, explicit_inc_to, to_struct;
  int len, offset, reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
static rtx get_push_address	PROTO ((int));
static rtx enqueue_insn		PROTO((rtx, rtx));
static void init_queue		PROTO((void));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int,
				       int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx get_memory_rtx	PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns        \
                                       (SIZE, ALIGN) < MOVE_RATIO)
#endif
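
/* For instance (figures illustrative only): with the default
   MOVE_RATIO of 15 and 4-byte alignment, a 16-byte copy takes four
   SImode moves, so MOVE_BY_PIECES_P is true and the copy is expanded
   inline rather than through a movstr pattern or a library call.  */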
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
  obfree (free_point);
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  p->pending_chain = pending_chain;
  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_chain = NULL_RTX;
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_chain = p->pending_chain;
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
                                  var, NULL_RTX, NULL_RTX, body,
                                  pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
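
/* For example, a typical caller pattern looks like this (a sketch;
   OP0 and OP1 are illustrative names for operands that may contain
   QUEUED rtxs):

       op0 = protect_from_queue (op0, 0);     read-only use
       op1 = protect_from_queue (op1, 1);     will be stored into
       emit_insn (gen_move_insn (op1, op0));

   The protected values must be used immediately; an emit_queue call
   between protect_from_queue and the emit would invalidate them.  */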
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
          MEM_COPY_ATTRIBUTES (new, x);
          MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }
          return new;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif
      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;
            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;
            case TFmode:
              libcall = extendsftf2_libfunc;
              break;
            default:
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;
            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;
            case TFmode:
              libcall = extenddftf2_libfunc;
              break;
            default:
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;
            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;
            default:
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;
            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;
            default:
              break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
                                       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
1127 if (from_mode
== DImode
&& to_mode
== SImode
)
1129 #ifdef HAVE_truncdisi2
1130 if (HAVE_truncdisi2
)
1132 emit_unop_insn (CODE_FOR_truncdisi2
, to
, from
, UNKNOWN
);
1136 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1140 if (from_mode
== DImode
&& to_mode
== HImode
)
1142 #ifdef HAVE_truncdihi2
1143 if (HAVE_truncdihi2
)
1145 emit_unop_insn (CODE_FOR_truncdihi2
, to
, from
, UNKNOWN
);
1149 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1153 if (from_mode
== DImode
&& to_mode
== QImode
)
1155 #ifdef HAVE_truncdiqi2
1156 if (HAVE_truncdiqi2
)
1158 emit_unop_insn (CODE_FOR_truncdiqi2
, to
, from
, UNKNOWN
);
1162 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1166 if (from_mode
== SImode
&& to_mode
== HImode
)
1168 #ifdef HAVE_truncsihi2
1169 if (HAVE_truncsihi2
)
1171 emit_unop_insn (CODE_FOR_truncsihi2
, to
, from
, UNKNOWN
);
1175 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1179 if (from_mode
== SImode
&& to_mode
== QImode
)
1181 #ifdef HAVE_truncsiqi2
1182 if (HAVE_truncsiqi2
)
1184 emit_unop_insn (CODE_FOR_truncsiqi2
, to
, from
, UNKNOWN
);
1188 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1192 if (from_mode
== HImode
&& to_mode
== QImode
)
1194 #ifdef HAVE_trunchiqi2
1195 if (HAVE_trunchiqi2
)
1197 emit_unop_insn (CODE_FOR_trunchiqi2
, to
, from
, UNKNOWN
);
1201 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1205 if (from_mode
== TImode
&& to_mode
== DImode
)
1207 #ifdef HAVE_trunctidi2
1208 if (HAVE_trunctidi2
)
1210 emit_unop_insn (CODE_FOR_trunctidi2
, to
, from
, UNKNOWN
);
1214 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1218 if (from_mode
== TImode
&& to_mode
== SImode
)
1220 #ifdef HAVE_trunctisi2
1221 if (HAVE_trunctisi2
)
1223 emit_unop_insn (CODE_FOR_trunctisi2
, to
, from
, UNKNOWN
);
1227 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1231 if (from_mode
== TImode
&& to_mode
== HImode
)
1233 #ifdef HAVE_trunctihi2
1234 if (HAVE_trunctihi2
)
1236 emit_unop_insn (CODE_FOR_trunctihi2
, to
, from
, UNKNOWN
);
1240 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1244 if (from_mode
== TImode
&& to_mode
== QImode
)
1246 #ifdef HAVE_trunctiqi2
1247 if (HAVE_trunctiqi2
)
1249 emit_unop_insn (CODE_FOR_trunctiqi2
, to
, from
, UNKNOWN
);
1253 convert_move (to
, force_reg (from_mode
, from
), unsignedp
);
1257 /* Handle truncation of volatile memrefs, and so on;
1258 the things that couldn't be truncated directly,
1259 and for which there was no special instruction. */
1260 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
))
1262 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
1263 emit_move_insn (to
, temp
);
1267 /* Mode combination is not recognized. */
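
/* As a usage sketch (names illustrative): zero-extending a QImode
   pseudo into an SImode pseudo reduces to

       rtx dst = gen_reg_rtx (SImode);
       rtx src = gen_reg_rtx (QImode);
       convert_move (dst, src, 1);     unsignedp == 1: zero-extend

   and convert_move picks a direct extension pattern, an intermediate
   mode, or an explicit shift pair, whichever the target supports.  */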
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES	MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx_MEM (mode, data->to_addr)
             : copy_rtx (change_address (data->to, mode,
                                         plus_constant (data->to_addr,
                                                        data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
        = (data->autinc_from
           ? gen_rtx_MEM (mode, data->from_addr)
           : copy_rtx (change_address (data->from, mode,
                                       plus_constant (data->from_addr,
                                                      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return 0;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different conventions
         for returning pointers, we could end up generating incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
        {
          tree fntype;

          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          fn = get_identifier ("memcpy");
          push_obstacks_nochange ();
          end_temporary_allocation ();
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          fn = build_decl (FUNCTION_DECL, fn, fntype);
          DECL_EXTERNAL (fn) = 1;
          TREE_PUBLIC (fn) = 1;
          DECL_ARTIFICIAL (fn) = 1;
          make_decl_rtl (fn, NULL_PTR, 1);
          assemble_external (fn);
          pop_obstacks ();
        }

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
      arg_list
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node),
                                      XEXP (x, 0)));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node),
                                      XEXP (y, 0)));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat, last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat, last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
                      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
        abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC in bytes.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int align, ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* If we won't be loading directly from memory, protect the real source
     from strange tricks we might play.  */
  src = orig_src;
  if (GET_CODE (src) != MEM)
    {
      src = gen_reg_rtx (GET_MODE (orig_src));
      emit_move_insn (src, orig_src);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          if (bytelen <= 0)
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i],
                          change_address (src, mode,
                                          plus_constant (XEXP (src, 0),
                                                         bytepos)));
        }
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, align, ssize);

      if (BYTES_BIG_ENDIAN && shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }
  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize, align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }
  else if (! MEM_IN_STRUCT_P (dst))
    {
      /* store_bit_field requires that memory operations have
         mem_in_struct_p set; we might not.  */

      dst = copy_rtx (orig_dst);
      MEM_SET_IN_STRUCT_P (dst, 1);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
          && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (change_address (dst, mode,
                                        plus_constant (XEXP (dst, 0),
                                                       bytepos)),
                        tmps[i]);
      else
        store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], align, ssize);
    }
  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  int bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
  int bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_stack_temp (BLKmode, bytes, 0);
      MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg,
                              TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
                                              * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines
     which do not use strict alignment, but it doesn't seem
     worth the effort at the current time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg,
                                     xbitpos / BITS_PER_WORD,
                                     BLKmode);

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode,
                                          word_mode,
                                          bitsize / BITS_PER_UNIT,
                                          BITS_PER_WORD),
                       bitsize / BITS_PER_UNIT, BITS_PER_WORD);
    }

  return tgtblk;
}
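
/* Worked example of the correction (figures illustrative): on a
   big-endian target with BITS_PER_WORD == 32, a 3-byte structure
   occupies the low-order 24 bits of the return register, so
   big_endian_correction = 32 - 3 * 8 = 8 and the copy skips the
   8 empty high-order bits of the source.  */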
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
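/* Illustration (not part of the original source): the mode-selection loop
   above is the usual greedy scheme for clearing memory, widest store
   first, then progressively narrower stores for the tail.  A plain C
   sketch, assuming only that 8/4/2/1-byte stores are available:

     void clear_bytes_sketch (unsigned char *p, long len)
     {
       long size, i;
       for (size = 8; size >= 1; size /= 2)
         while (len >= size)
           {
             for (i = 0; i < size; i++)
               p[i] = 0;                 (stands in for one store insn)
             p += size;
             len -= size;
           }
     }

   clear_by_pieces additionally honors the target's alignment rules and
   auto-increment addressing modes, which the sketch omits.  */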
/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, const0_rtx));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.

   If we call a function that returns the length of the block, return it.  */

rtx
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  rtx retval = 0;

  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && MOVE_BY_PIECES_P (INTVAL (size), align))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= (GET_MODE_MASK (mode) >> 1)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && (insn_operand_predicate[(int) code][0] == 0
		      || (*insn_operand_predicate[(int) code][0]) (object,
								   BLKmode))
		  && (insn_operand_predicate[(int) code][2] == 0
		      || (*insn_operand_predicate[(int) code][2]) (opalign,
								   VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  if (insn_operand_predicate[(int) code][1] != 0
		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
								     mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return 0;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

#ifdef TARGET_MEM_FUNCTIONS
	  /* It is incorrect to use the libcall calling conventions to call
	     memset in this context.

	     This could be a user call to memset and the user may wish to
	     examine the return value from memset.

	     For targets where libcalls and normal calls have different
	     conventions for returning pointers, we could end up generating
	     incorrect code.

	     So instead of using a libcall sequence we build up a suitable
	     CALL_EXPR and expand the call in the normal fashion.  */
	  if (fn == NULL_TREE)
	    {
	      tree fntype;

	      /* This was copied from except.c, I don't know if all this is
		 necessary in this context or not.  */
	      fn = get_identifier ("memset");
	      push_obstacks_nochange ();
	      end_temporary_allocation ();
	      fntype = build_pointer_type (void_type_node);
	      fntype = build_function_type (fntype, NULL_TREE);
	      fn = build_decl (FUNCTION_DECL, fn, fntype);
	      DECL_EXTERNAL (fn) = 1;
	      TREE_PUBLIC (fn) = 1;
	      DECL_ARTIFICIAL (fn) = 1;
	      make_decl_rtl (fn, NULL_PTR, 1);
	      assemble_external (fn);
	      pop_obstacks ();
	    }

	  /* We need to make an argument list for the function call.

	     memset has three arguments, the first is a void * address, the
	     second an integer with the initialization value, the last is a
	     size_t byte count for the copy.  */
	  arg_list
	    = build_tree_list (NULL_TREE,
			       make_tree (build_pointer_type (void_type_node),
					  XEXP (object, 0)));
	  TREE_CHAIN (arg_list)
	    = build_tree_list (NULL_TREE,
			       make_tree (integer_type_node, const0_rtx));
	  TREE_CHAIN (TREE_CHAIN (arg_list))
	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

	  /* Now we have to build up the CALL_EXPR itself.  */
	  call_expr = build1 (ADDR_EXPR,
			      build_pointer_type (TREE_TYPE (fn)), fn);
	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			     call_expr, arg_list, NULL_TREE);
	  TREE_SIDE_EFFECTS (call_expr) = 1;

	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2,
			     XEXP (object, 0), Pmode,
			     convert_to_mode
			     (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));

  return retval;
}
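/* Illustration (not part of the original source): the reason a bare libcall
   is wrong for the TARGET_MEM_FUNCTIONS case is visible at the source
   level.  In

     p = memset (q, 0, n);

   the program reads memset's return value, so the call must follow the
   normal calling conventions for functions returning pointers; on a target
   where libcalls return values differently, a libcall sequence could leave
   the result in the wrong place.  Building a CALL_EXPR and expanding it
   like any user call sidesteps that.  */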
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  /* Show the output dies here.  This is necessary for pseudos;
	     hard regs shouldn't appear here except as return values.
	     We never want to emit such a clobber after reload.  */
	  if (x != y
	      && ! (reload_in_progress || reload_completed))
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
	    }

	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      /* Show the output dies here.  This is necessary for pseudos;
	 hard regs shouldn't appear here except as return values.
	 We never want to emit such a clobber after reload.  */
      if (x != y
	  && ! (reload_in_progress || reload_completed))
	{
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
	}

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      return last_insn;
    }
  else
    abort ();
}
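/* Illustration (not part of the original source): a minimal sketch of the
   word-wise fallback above, assuming a hypothetical 4-byte word.  An
   8-byte move on a target with no 8-byte move pattern becomes two word
   moves:

     void move_multiword_sketch (unsigned int *x, const unsigned int *y,
                                 int nwords)
     {
       int i;
       for (i = 0; i < nwords; i++)
         x[i] = y[i];                 (models one emit_move_insn)
     }

   The real loop must also handle parts of Y that are not directly
   addressable, which is why it falls back to force_const_mem and
   operand_subword_force before giving up.  */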
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#if defined (STACK_GROWS_DOWNWARD) \
    || (defined (ARGS_GROW_DOWNWARD) \
	&& !defined (ACCUMULATE_OUTGOING_ARGS))

  /* Return the lowest stack address when STACK or ARGS grow downward and
     we are not accumulating outgoing arguments (the c4x port uses such
     conventions).  */
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			 negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			 negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
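/* Illustration (not part of the original source): a worked example of the
   address computed above.  In the #else arm, with size == 16, extra == 8
   and below == 0, the block begins at

     virtual_outgoing_args_rtx - 16 - 8 == virtual_outgoing_args_rtx - 24

   i.e. the padding sits beyond the block, at its high-address end.  With
   below != 0 the same call yields virtual_outgoing_args_rtx - 16, leaving
   the 8 padding bytes at low addresses instead.  */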
/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
   block of SIZE bytes.  */

rtx
get_push_address (size)
     int size;
{
  register rtx temp;

  if (STACK_PUSH_CODE == POST_DEC)
    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else if (STACK_PUSH_CODE == POST_INC)
    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else
    temp = stack_pointer_rtx;

  return copy_to_reg (temp);
}
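/* Illustration (not part of the original source): the arithmetic above
   simply undoes the stack-pointer update that the pushes performed.  With
   STACK_PUSH_CODE == POST_DEC, each push stores at the old stack pointer
   and then decrements it, so after pushing SIZE bytes the block begins at
   stack_pointer_rtx + SIZE; POST_INC is the mirror image, and with the
   PRE_ codes the stack pointer already addresses the data, so it is used
   unchanged.  */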
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far, reg_parm_stack_space)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);

	  if (current_function_check_memory_usage && ! in_check_memory_usage)
	    {
	      rtx temp;

	      in_check_memory_usage = 1;
	      temp = get_push_address (INTVAL (size) - used);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
				   temp, Pmode,
				   XEXP (xinner, 0), ptr_mode,
				   GEN_INT (INTVAL (size) - used),
				   TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				   temp, Pmode,
				   GEN_INT (INTVAL (size) - used),
				   TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));
	  if (current_function_check_memory_usage && ! in_check_memory_usage)
	    {
	      rtx target;

	      in_check_memory_usage = 1;
	      target = copy_to_reg (temp);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
				   target, Pmode,
				   XEXP (xinner, 0), ptr_mode,
				   size, TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				   target, Pmode,
				   size, TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
	    {
	      move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  else
	    {
	      rtx opalign = GEN_INT (align);
	      enum machine_mode mode;
	      rtx target = gen_rtx_MEM (BLKmode, temp);

	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		{
		  enum insn_code code = movstr_optab[(int) mode];

		  if (code != CODE_FOR_nothing
		      && ((GET_CODE (size) == CONST_INT
			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
			       <= (GET_MODE_MASK (mode) >> 1)))
			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		      && (insn_operand_predicate[(int) code][0] == 0
			  || ((*insn_operand_predicate[(int) code][0])
			      (target, BLKmode)))
		      && (insn_operand_predicate[(int) code][1] == 0
			  || ((*insn_operand_predicate[(int) code][1])
			      (xinner, BLKmode)))
		      && (insn_operand_predicate[(int) code][3] == 0
			  || ((*insn_operand_predicate[(int) code][3])
			      (opalign, VOIDmode))))
		    {
		      rtx op2 = convert_to_mode (mode, size, 1);
		      rtx last = get_last_insn ();
		      rtx pat;

		      if (insn_operand_predicate[(int) code][2] != 0
			  && ! ((*insn_operand_predicate[(int) code][2])
				(op2, mode)))
			op2 = copy_to_mode_reg (mode, op2);

		      pat = GEN_FCN ((int) code) (target, xinner,
						  op2, opalign);
		      if (pat)
			{
			  emit_insn (pat);
			  goto ret;
			}
		      else
			delete_insns_since (last);
		    }
		}
	    }

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space);
    }
  else
    {
      rtx addr;
      rtx target = NULL_RTX;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  target = addr;
	}

      emit_move_insn (gen_rtx_MEM (mode, addr), x);

      if (current_function_check_memory_usage && ! in_check_memory_usage)
	{
	  in_check_memory_usage = 1;

	  if (target == 0)
	    target = get_push_address (GET_MODE_SIZE (mode));

	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			       target, Pmode,
			       XEXP (x, 0), ptr_mode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype));
	  else
	    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
			       target, Pmode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_RW),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, -1, align);  /* ??? size?  */
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
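/* Illustration (not part of the original source): a worked example of the
   partial-in-registers bookkeeping in the `partial > 0' arm, assuming
   4-byte words and PARM_BOUNDARY == 64 (i.e. two words).  For a 6-word
   scalar with partial == 3 and a preallocated argument block (args_addr
   != 0, so OFFSET is kept):

     offset    == partial % 2 == 1       (word given space, never stored)
     not_stack == partial - offset == 2  (words that get no stack slot)

   so the loop runs i = 2 .. 5 and stores only the words with i >= 3;
   word 2 keeps an uninitialized slot shadowing a register.  When
   reg_parm_stack_space is nonzero, skip == not_stack shifts each store
   past the register-save area as well.  */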
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF)
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, &alignment);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();

	  if (GET_MODE (offset_rtx) != ptr_mode)
	    {
#ifdef POINTERS_EXTEND_UNSIGNED
	      offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	    }

	  /* A constant address in TO_RTX can have VOIDmode, we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (GET_CODE (to_rtx) == MEM
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
	    {
	      rtx temp = change_address (to_rtx, mode1,
					 plus_constant (XEXP (to_rtx, 0),
							(bitpos /
							 BITS_PER_UNIT)));
	      if (GET_CODE (XEXP (temp, 0)) == REG)
		to_rtx = temp;
	      else
		to_rtx = change_address (to_rtx, mode1,
					 force_reg (GET_MODE (XEXP (temp, 0)),
						    XEXP (temp, 0)));
	      bitpos = 0;
	    }
	  else
	    to_rtx = change_address (to_rtx, VOIDmode,
				     gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
						   force_reg (ptr_mode,
							      offset_rtx)));
	}

      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    {
	      /* When the offset is zero, to_rtx is the address of the
		 structure we are storing into, and hence may be shared.
		 We must make a new MEM before setting the volatile bit.  */
	      if (offset == 0)
		to_rtx = copy_rtx (to_rtx);

	      MEM_VOLATILE_P (to_rtx) = 1;
	    }
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && TREE_READONLY (TREE_OPERAND (to, 1)))
	{
	  if (offset == 0)
	    to_rtx = copy_rtx (to_rtx);

	  RTX_UNCHANGING_P (to_rtx) = 1;
	}

      /* Check the access.  */
      if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
	{
	  rtx to_addr;
	  int size;
	  int best_mode_size;
	  enum machine_mode best_mode;

	  best_mode = get_best_mode (bitsize, bitpos,
				     TYPE_ALIGN (TREE_TYPE (tem)),
				     mode1, volatilep);
	  if (best_mode == VOIDmode)
	    best_mode = QImode;

	  best_mode_size = GET_MODE_BITSIZE (best_mode);
	  to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
	  size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
	  size *= GET_MODE_SIZE (best_mode);

	  /* Check the access right of the pointer.  */
	  if (size)
	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       to_addr, ptr_mode,
			       GEN_INT (size), TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_WO),
			       TYPE_MODE (integer_type_node));
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    alignment,
			    int_size_in_bytes (TREE_TYPE (tem)),
			    get_alias_set (to));
      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
	 Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					  TYPE_MODE (TREE_TYPE (from)),
					  result,
					  TREE_UNSIGNED (TREE_TYPE (to)))
	      : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
     a promoted variable where the zero- or sign- extension needs to be done.
     Handling this in the normal way is safe because no computation is done
     before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from),
			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
      else
	emit_move_insn (to_rtx, value);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    {
      to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
      if (GET_CODE (to_rtx) == MEM)
	MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
    }

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
			      EXPAND_MEMORY_USE_DONT);

      /* Copy the rights of the bitmap.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			   XEXP (to_rtx, 0), ptr_mode,
			   XEXP (from_rtx, 0), ptr_mode,
			   convert_to_mode (TYPE_MODE (sizetype),
					    size, TREE_UNSIGNED (sizetype)),
			   TYPE_MODE (sizetype));

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node),
					  size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
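/* Illustration (not part of the original source): the COMPONENT_REF path
   above is what a bitfield store such as

     struct s { unsigned int a : 3, b : 5; } x;
     x.b = v;

   goes through: get_inner_reference reports bitsize == 5 and (on a
   typical little-endian layout; the numbering is target-dependent)
   bitpos == 3, and store_field then emits the read-modify-write of the
   word containing X rather than a plain move.  */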
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more careful?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
	dont_return_target = 1;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
			  GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  But don't do this if the type of EXP is a subtype
	 of something else since then the conversion might involve
	 more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp = convert
	      (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
					TREE_TYPE (exp)),
	       exp);

	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
					SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
	}

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
	 the access now so it gets done only once.  Likewise if
	 it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
	  && (MEM_VOLATILE_P (temp)
	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
			      TYPE_MODE (TREE_TYPE (exp)), temp,
			      SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));
      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  if (current_function_check_memory_usage
      && GET_CODE (target) == MEM
      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    {
      if (GET_CODE (temp) == MEM)
	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			   XEXP (target, 0), ptr_mode,
			   XEXP (temp, 0), ptr_mode,
			   expr_size (exp), TYPE_MODE (sizetype));
      else
	emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			   XEXP (target, 0), ptr_mode,
			   expr_size (exp), TYPE_MODE (sizetype),
			   GEN_INT (MEMORY_USE_WO),
			   TYPE_MODE (integer_type_node));
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.
     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      convert (sizetype,
				       build_int_2 (TREE_STRING_LENGTH (exp),
						    0)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */

	      addr = XEXP (target, 0);
	      addr = convert_modes (ptr_mode, Pmode, addr, 1);

	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
		}
	      else
		{
		  addr = force_reg (ptr_mode, addr);
		  addr = expand_binop (ptr_mode, add_optab, addr,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (ptr_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, 0, label);
		}

	      if (size != const0_rtx)
		{
		  /* Be sure we can write on ADDR.  */
		  if (current_function_check_memory_usage)
		    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				       addr, ptr_mode,
				       size, TYPE_MODE (sizetype),
				       GEN_INT (MEMORY_USE_WO),
				       TYPE_MODE (integer_type_node));
#ifdef TARGET_MEM_FUNCTIONS
		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
				     addr, ptr_mode,
				     const0_rtx, TYPE_MODE (integer_type_node),
				     convert_to_mode (TYPE_MODE (sizetype),
						      size,
						      TREE_UNSIGNED (sizetype)),
				     TYPE_MODE (sizetype));
#else
		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
				     addr, Pmode,
				     convert_to_mode (TYPE_MODE (integer_type_node),
						      size,
						      TREE_UNSIGNED (integer_type_node)),
				     TYPE_MODE (integer_type_node));
#endif
		}

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
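/* Illustration (not part of the original source): the STRING_CST case
   above implements initializations such as

     char buf[8] = "abc";

   copy TREE_STRING_LENGTH bytes (here 4, "abc" plus the terminating NUL),
   then clear the remaining 4.  When the target's size is only known at
   run time, the emitted code computes size - copied and branches over the
   clearing call if the result is negative.  */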
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
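/* Illustration (not part of the original source): the 3/4 threshold is
   evaluated in integer arithmetic as 4 * zeros >= 3 * elts so no division
   is needed; e.g. 6 zero elements out of 8 gives 24 >= 24, so such a
   constructor counts as mostly zero and the clear-first strategy in
   store_constructor is preferred.  */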
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos,
			 mode, exp, type, cleared)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (bitpos != 0)
	target = change_address (target, VOIDmode,
				 plus_constant (XEXP (target, 0),
						bitpos / BITS_PER_UNIT));
      store_constructor (exp, target, cleared);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp,
		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
		 int_size_in_bytes (type), 0);
}
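/* Illustration (not part of the original source): the CLEARED shortcut
   pays off for nested aggregates.  Given

     struct { struct { int a, b; } in; int c; } v = { { 0, 0 }, 1 };

   store_constructor clears V once, then the recursive call for V.IN sees
   CLEARED set and can skip its all-zero elements entirely instead of
   clearing the substructure a second time via store_field.  */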
3947 /* Store the value of constructor EXP into the rtx TARGET.
3948 TARGET is either a REG or a MEM.
3949 CLEARED is true if TARGET is known to have been zero'd. */
3952 store_constructor (exp
, target
, cleared
)
3957 tree type
= TREE_TYPE (exp
);
3958 rtx exp_size
= expr_size (exp
);
3960 /* We know our target cannot conflict, since safe_from_p has been called. */
3962 /* Don't try copying piece by piece into a hard register
3963 since that is vulnerable to being clobbered by EXP.
3964 Instead, construct in a pseudo register and then copy it all. */
3965 if (GET_CODE (target
) == REG
&& REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3967 rtx temp
= gen_reg_rtx (GET_MODE (target
));
3968 store_constructor (exp
, temp
, 0);
3969 emit_move_insn (target
, temp
);
3974 if (TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
3975 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
3979 /* Inform later passes that the whole union value is dead. */
3980 if (TREE_CODE (type
) == UNION_TYPE
3981 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
3982 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
3984 /* If we are building a static constructor into a register,
3985 set the initial value as zero so we can fold the value into
3986 a constant. But if more than one register is involved,
3987 this probably loses. */
3988 else if (GET_CODE (target
) == REG
&& TREE_STATIC (exp
)
3989 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
3992 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
3997 /* If the constructor has fewer fields than the structure
3998 or if we are initializing the structure to mostly zeros,
3999 clear the whole structure first. */
4000 else if ((list_length (CONSTRUCTOR_ELTS (exp
))
4001 != list_length (TYPE_FIELDS (type
)))
4002 || mostly_zeros_p (exp
))
4005 clear_storage (target
, expr_size (exp
),
4006 TYPE_ALIGN (type
) / BITS_PER_UNIT
);
4011 /* Inform later passes that the old value is dead. */
4012 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
4014 /* Store each element of the constructor into
4015 the corresponding field of TARGET. */
4017 for (elt
= CONSTRUCTOR_ELTS (exp
); elt
; elt
= TREE_CHAIN (elt
))
4019 register tree field
= TREE_PURPOSE (elt
);
4020 tree value
= TREE_VALUE (elt
);
4021 register enum machine_mode mode
;
4025 tree pos
, constant
= 0, offset
= 0;
4026 rtx to_rtx
= target
;
4028 /* Just ignore missing fields.
4029 We cleared the whole structure, above,
4030 if any fields are missing. */
4034 if (cleared
&& is_zeros_p (TREE_VALUE (elt
)))
4037 bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
4038 unsignedp
= TREE_UNSIGNED (field
);
4039 mode
= DECL_MODE (field
);
4040 if (DECL_BIT_FIELD (field
))
4043 pos
= DECL_FIELD_BITPOS (field
);
4044 if (TREE_CODE (pos
) == INTEGER_CST
)
4046 else if (TREE_CODE (pos
) == PLUS_EXPR
4047 && TREE_CODE (TREE_OPERAND (pos
, 1)) == INTEGER_CST
)
4048 constant
= TREE_OPERAND (pos
, 1), offset
= TREE_OPERAND (pos
, 0);
4053 bitpos
= TREE_INT_CST_LOW (constant
);
4059 if (contains_placeholder_p (offset
))
4060 offset
= build (WITH_RECORD_EXPR
, sizetype
,
4061 offset
, make_tree (TREE_TYPE (exp
), target
));
4063 offset
= size_binop (FLOOR_DIV_EXPR
, offset
,
4064 size_int (BITS_PER_UNIT
));
4066 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, 0);
4067 if (GET_CODE (to_rtx
) != MEM
)
4070 if (GET_MODE (offset_rtx
) != ptr_mode
)
4072 #ifdef POINTERS_EXTEND_UNSIGNED
4073 offset_rtx
= convert_memory_address (ptr_mode
, offset_rtx
);
4075 offset_rtx
= convert_to_mode (ptr_mode
, offset_rtx
, 0);
4080 = change_address (to_rtx
, VOIDmode
,
4081 gen_rtx_PLUS (ptr_mode
, XEXP (to_rtx
, 0),
4082 force_reg (ptr_mode
, offset_rtx
)));
4084 if (TREE_READONLY (field
))
4086 if (GET_CODE (to_rtx
) == MEM
)
4087 to_rtx
= copy_rtx (to_rtx
);
4089 RTX_UNCHANGING_P (to_rtx
) = 1;
4092 #ifdef WORD_REGISTER_OPERATIONS
4093 /* If this initializes a field that is smaller than a word, at the
4094 start of a word, try to widen it to a full word.
4095 This special case allows us to output C++ member function
4096 initializations in a form that the optimizers can understand. */
4098 && GET_CODE (target
) == REG
4099 && bitsize
< BITS_PER_WORD
4100 && bitpos
% BITS_PER_WORD
== 0
4101 && GET_MODE_CLASS (mode
) == MODE_INT
4102 && TREE_CODE (value
) == INTEGER_CST
4103 && GET_CODE (exp_size
) == CONST_INT
4104 && bitpos
+ BITS_PER_WORD
<= INTVAL (exp_size
) * BITS_PER_UNIT
)
4106 tree type
= TREE_TYPE (value
);
4107 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
4109 type
= type_for_size (BITS_PER_WORD
, TREE_UNSIGNED (type
));
4110 value
= convert (type
, value
);
4112 if (BYTES_BIG_ENDIAN
)
4114 = fold (build (LSHIFT_EXPR
, type
, value
,
4115 build_int_2 (BITS_PER_WORD
- bitsize
, 0)));
4116 bitsize
= BITS_PER_WORD
;
4120 store_constructor_field (to_rtx
, bitsize
, bitpos
,
4121 mode
, value
, type
, cleared
);
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
	  need_to_clear = 0;
	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp);
	       elt != NULL_TREE;
	       elt = TREE_CHAIN (elt))
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;
	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);
		  if (TREE_CODE (lo_index) != INTEGER_CST
		      || TREE_CODE (hi_index) != INTEGER_CST)
		    {
		      need_to_clear = 1;
		      break;
		    }
		  this_node_count = TREE_INT_CST_LOW (hi_index)
		    - TREE_INT_CST_LOW (lo_index) + 1;
		}
	      else
		this_node_count = 1;
	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }
	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (count < maxelt - minelt + 1
	      || 4 * zero_count >= 3 * count)
	    need_to_clear = 1;
	}
      if (need_to_clear)
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
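      /* E.g., "int v[8] = { [6] = 1, [7] = 2 }" has missing elements, so
	 the whole array is cleared first and only the two nonzero elements
	 are stored below; the same path is taken whenever at least 75% of
	 the elements are zero.  (Illustrative example.)  */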
      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  if (cleared && is_zeros_p (value))
	    continue;

	  mode = TYPE_MODE (elttype);
	  bitsize = GET_MODE_BITSIZE (mode);
	  unsignedp = TREE_UNSIGNED (elttype);

	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	    {
	      tree lo_index = TREE_OPERAND (index, 0);
	      tree hi_index = TREE_OPERAND (index, 1);
	      rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
	      struct nesting *loop;
	      HOST_WIDE_INT lo, hi, count;
	      tree position;

	      /* If the range is constant and "small", unroll the loop.  */
	      if (TREE_CODE (lo_index) == INTEGER_CST
		  && TREE_CODE (hi_index) == INTEGER_CST
		  && (lo = TREE_INT_CST_LOW (lo_index),
		      hi = TREE_INT_CST_LOW (hi_index),
		      count = hi - lo + 1,
		      (GET_CODE (target) != MEM
		       || count <= 2
		       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
			   && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
			   <= 40 * 8))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
		      store_constructor_field (target, bitsize, bitpos,
					       mode, value, type, cleared);
		    }
		}
	      else
		{
		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  loop_top = gen_label_rtx ();
		  loop_end = gen_label_rtx ();

		  unsignedp = TREE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  DECL_RTL (index) = index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));

		  if (TREE_CODE (value) == SAVE_EXPR
		      && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the
			 loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
		  store_expr (lo_index, index_r, 0);
		  loop = expand_start_loop (0);

		  /* Assign value to element index.  */
		  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
					 size_int (BITS_PER_UNIT));
		  position = size_binop (MULT_EXPR,
					 size_binop (MINUS_EXPR, index,
						     TYPE_MIN_VALUE (domain)),
					 position);
		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
		  xtarget = change_address (target, mode, addr);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, cleared);
		  else
		    store_expr (value, xtarget, 0);

		  expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					    TREE_TYPE (index),
					    index, integer_one_node), 0, 0);
		  expand_end_loop ();
		  emit_label (loop_end);

		  /* Needed by stupid register allocation, to extend the
		     lifetime of pseudo-regs used by target past the end
		     of the loop.  */
		  emit_insn (gen_rtx_USE (GET_MODE (target), target));
		}
	    }
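	  /* A GNU C range initializer such as "int v[100] = { [0 ... 99] = 1 }"
	     reaches this point as a RANGE_EXPR index: small constant ranges
	     are unrolled above, larger ones are filled by the runtime loop
	     just generated.  (Illustrative example.)  */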
	  else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
		   || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
	    {
	      rtx pos_rtx, addr;
	      tree position;

	      if (index == 0)
		index = size_int (i);

	      if (minelt)
		index = size_binop (MINUS_EXPR, index,
				    TYPE_MIN_VALUE (domain));
	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
				     size_int (BITS_PER_UNIT));
	      position = size_binop (MULT_EXPR, index, position);
	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
	      addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
	      xtarget = change_address (target, mode, addr);
	      store_expr (value, xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
	      else
		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));

	      store_constructor_field (target, bitsize, bitpos,
				       mode, value, type, cleared);
	    }
	}
    }
  /* set constructor assignments */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      int nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the whole set (using bzero/memset)
	 and then set the bits we want.  */
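      /* E.g., for a 256-bit set whose constructor names bits 1 and 5 plus
	 a run-time range (as a Pascal or CHILL front end might emit), the
	 constant bits are copied in as literal words by the code below,
	 after which the non-constant range is or'ed in via memset or
	 __setbits.  (Illustrative example.)  */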
      /* Check for all zeros.  */
      if (elt == NULL_TREE)
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_binop (MINUS_EXPR, domain_max, domain_min),
			      size_one_node);

      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
	abort ();
      nbits = TREE_INT_CST_LOW (bitlength);
4364 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4365 are "complicated" (more than one range), initialize (the
4366 constant parts) by copying from a constant. */
4367 if (GET_MODE (target
) != BLKmode
|| nbits
<= 2 * BITS_PER_WORD
4368 || (nbytes
<= 32 && TREE_CHAIN (elt
) != NULL_TREE
))
4370 int set_word_size
= TYPE_ALIGN (TREE_TYPE (exp
));
4371 enum machine_mode mode
= mode_for_size (set_word_size
, MODE_INT
, 1);
4372 char *bit_buffer
= (char *) alloca (nbits
);
4373 HOST_WIDE_INT word
= 0;
4376 int offset
= 0; /* In bytes from beginning of set. */
4377 elt
= get_set_constructor_bits (exp
, bit_buffer
, nbits
);
4380 if (bit_buffer
[ibit
])
4382 if (BYTES_BIG_ENDIAN
)
4383 word
|= (1 << (set_word_size
- 1 - bit_pos
));
4385 word
|= 1 << bit_pos
;
4388 if (bit_pos
>= set_word_size
|| ibit
== nbits
)
4390 if (word
!= 0 || ! cleared
)
4392 rtx datum
= GEN_INT (word
);
4394 /* The assumption here is that it is safe to use
4395 XEXP if the set is multi-word, but not if
4396 it's single-word. */
4397 if (GET_CODE (target
) == MEM
)
4399 to_rtx
= plus_constant (XEXP (target
, 0), offset
);
4400 to_rtx
= change_address (target
, mode
, to_rtx
);
4402 else if (offset
== 0)
4406 emit_move_insn (to_rtx
, datum
);
4412 offset
+= set_word_size
/ BITS_PER_UNIT
;
	  /* Don't bother clearing storage if the set is all ones.  */
	  if (TREE_CHAIN (elt) != NULL_TREE
	      || (TREE_PURPOSE (elt) == NULL_TREE
		  ? nbits != 1
		  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
		     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
		     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
			 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
			 != nbits))))
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	}
      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* start of range of element or NULL */
	  tree startbit = TREE_PURPOSE (elt);
	  /* end of range of element, or element value */
	  tree endbit = TREE_VALUE (elt);
#ifdef TARGET_MEM_FUNCTIONS
	  HOST_WIDE_INT startb, endb;
#endif
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* handle non-range tuple element like [ expr ]  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }
	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (GET_CODE (target) == REG)
	    {
	      targetx = assign_stack_temp (GET_MODE (target),
					   GET_MODE_SIZE (GET_MODE (target)),
					   0);
	      emit_move_insn (targetx, target);
	    }
	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

#ifdef TARGET_MEM_FUNCTIONS
	  /* Optimization:  If startbit and endbit are
	     constants divisible by BITS_PER_UNIT,
	     call memset instead.  */
	  if (TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, 0,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
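	  /* E.g., a range covering bits 8..63 has both endpoints on a byte
	     boundary (startb == 8, endb == 64), so it is set with a single
	     memset of (64 - 8) / 8 == 7 bytes of all-ones at byte offset 1.
	     (Illustrative example.)  */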
	  else
#endif
	    emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
			       0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
			       bitlength_rtx, TYPE_MODE (sizetype),
			       startbit_rtx, TYPE_MODE (sizetype),
			       endbit_rtx, TYPE_MODE (sizetype));

	  if (GET_CODE (target) == REG)
	    emit_move_insn (target, targetx);
	}
    }
  else
    abort ();
}
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */
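/* For example, an assignment to F in "struct S { int f : 3; } s;" would
   arrive here with BITSIZE == 3, BITPOS == 0 and MODE == VOIDmode, and be
   handled by store_bit_field below; a whole, naturally aligned member
   instead takes the addressable path at the end of the function.
   (Illustrative example.)  */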
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size, alias_set)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
				      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      MEM_SET_IN_STRUCT_P (object, 1);
      MEM_SET_IN_STRUCT_P (blk_object, 1);
      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
		   align, total_size, alias_set);

      /* Even though we aren't returning target, we need to
	 give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }
  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (SLOW_UNALIGNED_ACCESS
	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is a
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = change_address (target, VOIDmode,
				   plus_constant (XEXP (target, 0),
						  bitpos / BITS_PER_UNIT));

	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   1);

	  return value_mode == VOIDmode ? const0_rtx : target;
	}
      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.  */
	  /* If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      if (unsignedp)
		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }

	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, 0, align,
				    total_size);
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = copy_rtx (change_address (target, mode,
					 plus_constant (addr,
							(bitpos
							 / BITS_PER_UNIT))));
      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      MEM_ALIAS_SET (to_rtx) = alias_set;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.
   We set *PALIGNMENT to the alignment in bytes of the address that will be
   computed.  This is the alignment of the thing we return if *POFFSET
   is zero, but can be less strictly aligned if *POFFSET is nonzero.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
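/* For example, given "struct { int x; short y; } s;", a reference to s.y
   decomposes into the containing object S with *PBITSIZE == 16,
   *PBITPOS == 32 on a typical 32-bit layout, *POFFSET == 0 and *PMODE the
   mode of a short; a reference like a[i].y with a variable index instead
   puts the variable byte offset in *POFFSET.  (Illustrative example.)  */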
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep, palignment)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
     int *palignment;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;
  unsigned int alignment = BIGGEST_ALIGNMENT;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));

      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));
	  tree constant = integer_zero_node, var = pos;

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  /* Assume here that the offset is a multiple of a unit.
	     If not, there should be an explicitly added constant.  */
	  if (TREE_CODE (pos) == PLUS_EXPR
	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos, var = integer_zero_node;

	  *pbitpos += TREE_INT_CST_LOW (constant);
	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (EXACT_DIV_EXPR, var,
					   size_int (BITS_PER_UNIT)));
	}

      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);
	  tree xindex;

	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
	    {
	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
			       index);
	      index_type = TREE_TYPE (index);
	    }

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion,  (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	     But sizetype isn't quite right either (especially if
	     the lowbound is negative).  FIXME */

	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index,
				 convert (sizetype, low_bound)));

	  if (TREE_CODE (index) == INTEGER_CST)
	    {
	      index = convert (sbitsizetype, index);
	      index_type = TREE_TYPE (index);
	    }

	  xindex = fold (build (MULT_EXPR, sbitsizetype, index,
				convert (sbitsizetype,
					 TYPE_SIZE (TREE_TYPE (exp)))));

	  if (TREE_CODE (xindex) == INTEGER_CST
	      && TREE_INT_CST_HIGH (xindex) == 0)
	    *pbitpos += TREE_INT_CST_LOW (xindex);
	  else
	    {
	      /* Either the bit offset calculated above is not constant, or
		 it overflowed.  In either case, redo the multiplication
		 against the size in units.  This is especially important
		 in the non-constant case to avoid a division at runtime.  */
	      xindex = fold (build (MULT_EXPR, ssizetype, index,
				    convert (ssizetype,
					     TYPE_SIZE_UNIT (TREE_TYPE (exp)))));

	      if (contains_placeholder_p (xindex))
		xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);

	      offset = size_binop (PLUS_EXPR, offset, xindex);
	    }
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       != UNION_TYPE))
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      /* If the offset is non-constant already, then we can't assume any
	 alignment more than the alignment here.  */
      if (! integer_zerop (offset))
	alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

      exp = TREE_OPERAND (exp, 0);
    }

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
    alignment = MIN (alignment, DECL_ALIGN (exp));
  else if (TREE_TYPE (exp) != 0)
    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  *palignment = alignment / BITS_PER_UNIT;
  return exp;
}
/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;
      break;
    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;
      break;
    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;
      break;
    case EXPAND_MEMORY_USE_DONT:
      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
	 MEMORY_USE_DONT, because they are modifiers to a call of
	 expand_expr in the ADDR_EXPR case of expand_expr.  */
    case EXPAND_CONST_ADDRESS:
    case EXPAND_INITIALIZER:
      return MEMORY_USE_DONT;
      break;
    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG
			    ? target : 0);

  /* Check for a PIC address load.  */
  if (flag_pic
      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (! subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (! CONSTANT_P (op2)
	  && ! (GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (! CONSTANT_P (op2)
	  && ! (GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
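/* For example, given (plus:SI (reg:SI 100) (reg:SI 101)), the code above
   emits an add via expand_binop and returns a pseudo register holding the
   sum, so the caller can use the result wherever a plain register or
   memory operand is required.  (Illustrative example.)  */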
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_temp (part_type, 0, 1, 1);
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}
/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static int save_expr_count;
  static int save_expr_size = 0;
  static tree *save_expr_rewritten;
  static tree save_expr_trees[256];

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode))
    return 1;

  if (top_p && save_expr_size == 0)
    {
      int rtn;

      save_expr_count = 0;
      save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
      save_expr_rewritten = &save_expr_trees[0];

      rtn = safe_from_p (x, exp, 1);

      for (i = 0; i < save_expr_count; ++i)
	{
	  if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
	    abort ();
	  TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
	}

      save_expr_size = 0;

      return rtn;
    }

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp), 0))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
		  || TREE_STATIC (exp));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }

	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

	  /* This SAVE_EXPR might appear many times in the top-level
	     safe_from_p() expression, and if it has a complex
	     subexpression, examining it multiple times could result
	     in a combinatorial explosion.  E.g. on an Alpha
	     running at least 200MHz, a Fortran test case compiled with
	     optimization took about 28 minutes to compile -- even though
	     it was only a few lines long, and the complicated line causing
	     so much time to be spent in the earlier version of safe_from_p()
	     had only 293 or so unique nodes.

	     So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
	     where it is so we can turn it back in the top-level safe_from_p()
	     when we return.  */

	  /* For now, don't bother re-sizing the array.  */
	  if (save_expr_count >= save_expr_size)
	    return 0;
	  save_expr_rewritten[save_expr_count++] = exp;

	  nops = tree_code_length[(int) SAVE_EXPR];
	  for (i = 0; i < nops; i++)
	    {
	      tree operand = TREE_OPERAND (exp, i);
	      if (operand == NULL_TREE)
		continue;
	      TREE_SET_CODE (exp, ERROR_MARK);
	      if (! safe_from_p (x, operand, 0))
		return 0;
	      TREE_SET_CODE (exp, SAVE_EXPR);
	    }
	  TREE_SET_CODE (exp, ERROR_MARK);
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	case METHOD_CALL_EXPR:
	  /* This takes a rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
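/* For example, when expanding X = A + B, the expander may compute A
   directly into X's location only if evaluating B can never read X;
   safe_from_p (X, B, 1) makes that test, and a zero answer forces a
   temporary instead.  (Illustrative example.)  */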
/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
#ifdef MAX_INTEGER_COMPUTATION_MODE
void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code = TREE_CODE (exp);
  enum machine_mode mode;

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }
}
#endif
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
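/* A typical internal use is expanding the right-hand side of an
   assignment, roughly:

       temp = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   where the caller must be prepared for TEMP to be any rtx, not
   necessarily a register.  (Illustrative call; see the callers of this
   function elsewhere in this file for the real uses.)  */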
rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode;
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1')
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2'
	       || TREE_CODE_CLASS (code) == '<')
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);

      target = 0;
    }

#ifdef MAX_INTEGER_COMPUTATION_MODE
  if (target
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  if (TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    fatal ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    /* Allocate in the memory associated with the function
	       that the label is in.  */
	    push_obstacks (p->function_obstack,
			   p->function_maybepermanent_obstack);

	    p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
						  label_rtx (exp),
						  p->forced_labels);
	    pop_obstacks ();
	  }
	else if (modifier == EXPAND_INITIALIZER)
	  forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
					     label_rtx (exp),
					     forced_labels);
	temp = gen_rtx_MEM (FUNCTION_MODE,
			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  layout_decl (exp, 0);
	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
	  pop_obstacks ();
	}

      /* Although static-storage variables start off initialized, according to
	 ANSI C, a memcpy could overwrite them with uninitialized values.  So
	 we check them too.  This also lets us check for read-only variables
	 accessed via a non-const declaration, in case it won't be detected
	 any other way (e.g., in an embedded system or OS kernel without
	 memory protection).

	 Aggregates are not checked here; they're handled elsewhere.  */
      if (current_function_check_memory_usage && code == VAR_DECL
	  && GET_CODE (DECL_RTL (exp)) == MEM
	  && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	{
	  enum memory_use_mode memory_usage;
	  memory_usage = get_memory_usage_from_modifier (modifier);

	  if (memory_usage != MEMORY_USE_DONT)
	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       XEXP (DECL_RTL (exp), 0), ptr_mode,
			       GEN_INT (int_size_in_bytes (type)),
			       TYPE_MODE (sizetype),
			       GEN_INT (memory_usage),
			       TYPE_MODE (integer_type_node));
	}

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr = gen_rtx_MEM (Pmode,
				fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);
	  temp = change_address (DECL_RTL (exp), mode, addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
			       XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = change_address (DECL_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 in case the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0),
			      DECL_ALIGN (exp) / BITS_PER_UNIT);

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();

	  temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return DECL_RTL (exp);
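      /* E.g., on a target whose PROMOTE_MODE widens QImode locals to
	 SImode, a "char" variable living in an SImode pseudo is returned
	 above as (subreg:QI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set,
	 telling later code that the upper bits are already sign- or
	 zero-extended.  (Illustrative example.)  */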
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp),
				 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
			  EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return change_address (TREE_CST_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	char *saved_input_filename = input_filename;
	int saved_lineno = lineno;
	input_filename = EXPR_WFL_FILENAME (exp);
	lineno = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }

    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (type, 3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
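      /* E.g., when fold wraps a subexpression used twice in one SAVE_EXPR
	 node, the first expansion above stores its value in SAVE_EXPR_RTL,
	 and every later expansion of the same node simply returns that rtx,
	 so the subexpression is evaluated exactly once.  (Illustrative
	 example.)  */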
    case UNSAVE_EXPR:
      {
	rtx temp;
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
	return temp;
      }
    case PLACEHOLDER_EXPR:
      {
	tree placeholder_expr;

	/* If there is an object on the head of the placeholder list,
	   see if any object in it is of type TYPE or a pointer to it.  For
	   further information, see tree.def.  */
	for (placeholder_expr = placeholder_list;
	     placeholder_expr != 0;
	     placeholder_expr = TREE_CHAIN (placeholder_expr))
	  {
	    tree need_type = TYPE_MAIN_VARIANT (type);
	    tree object = 0;
	    tree old_list = placeholder_list;
	    tree elt;

	    /* Find the outermost reference that is of the type we want.
	       If none, see if any object has a type that is a pointer to
	       the type we want.  */
	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
		object = elt;

	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (POINTER_TYPE_P (TREE_TYPE (elt))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		      == need_type))
		object = build1 (INDIRECT_REF, need_type, elt);

	    if (object != 0)
	      {
		/* Expand this object skipping the list entries before
		   it was found in case it is also a PLACEHOLDER_EXPR.
		   In that case, we want to translate it using subsequent
		   entries.  */
		placeholder_list = TREE_CHAIN (placeholder_expr);
		temp = expand_expr (object, original_target, tmode,
				    ro_modifier);
		placeholder_list = old_list;
		return temp;
	      }
	  }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();
5991 case WITH_RECORD_EXPR
:
5992 /* Put the object on the placeholder list, expand our first operand,
5993 and pop the list. */
5994 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
5996 target
= expand_expr (TREE_OPERAND (exp
, 0), original_target
,
5997 tmode
, ro_modifier
);
5998 placeholder_list
= TREE_CHAIN (placeholder_list
);
6002 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == LABEL_DECL
)
6003 expand_goto (TREE_OPERAND (exp
, 0));
6005 expand_computed_goto (TREE_OPERAND (exp
, 0));
6009 expand_exit_loop_if_false (NULL_PTR
,
6010 invert_truthvalue (TREE_OPERAND (exp
, 0)));
6013 case LABELED_BLOCK_EXPR
:
6014 if (LABELED_BLOCK_BODY (exp
))
6015 expand_expr_stmt (LABELED_BLOCK_BODY (exp
));
6016 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp
)));
6019 case EXIT_BLOCK_EXPR
:
6020 if (EXIT_BLOCK_RETURN (exp
))
6021 sorry ("returned value in block_exit_expr");
6022 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp
)));
6027 expand_start_loop (1);
6028 expand_expr_stmt (TREE_OPERAND (exp
, 0));
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (0);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}
      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
			&& (! MOVE_BY_PIECES_P
			    (TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT,
			     TYPE_ALIGN (type) / BITS_PER_UNIT))
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM
	      && (! memory_address_p (GET_MODE (constructor),
				      XEXP (constructor, 0))
		  || (flag_force_addr
		      && GET_CODE (XEXP (constructor, 0)) != REG)))
	    constructor = change_address (constructor, VOIDmode,
					  XEXP (constructor, 0));
	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL)
	    {
	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (TREE_READONLY (exp))
	    {
	      if (GET_CODE (target) == MEM)
		target = copy_rtx (target);

	      RTX_UNCHANGING_P (target) = 1;
	    }

	  store_constructor (exp, target, 0);
	  return target;
	}
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;
	tree index;
	tree string = string_constant (exp1, &index);
	int i;

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_MEMORY_USE_WO)
	  return GEN_INT (TREE_STRING_POINTER (string)[i]);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (current_function_check_memory_usage
	    && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		in_check_memory_usage = 1;
		emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				   op0, ptr_mode,
				   GEN_INT (int_size_in_bytes (type)),
				   TYPE_MODE (sizetype),
				   GEN_INT (memory_usage),
				   TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }

	temp = gen_rtx_MEM (mode, op0);
	/* If address was computed by addition,
	   mark this as an element of an aggregate.  */
	if (TREE_CODE (exp1) == PLUS_EXPR
	    || (TREE_CODE (exp1) == SAVE_EXPR
		&& TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
	    || (TREE_CODE (exp1) == ADDR_EXPR
		&& (exp2 = TREE_OPERAND (exp1, 0))
		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
	  MEM_SET_IN_STRUCT_P (temp, 1);

	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
	MEM_ALIAS_SET (temp) = get_alias_set (exp);

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
	return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();
      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = TREE_OPERAND (exp, 1);
	tree index_type = TREE_TYPE (index);
	HOST_WIDE_INT i;

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion,  (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	   But sizetype isn't quite right either (especially if
	   the lowbound is negative).  FIXME */

	if (! integer_zerop (low_bound))
	  index = fold (build (MINUS_EXPR, index_type, index,
			       convert (sizetype, low_bound)));
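
	/* E.g. for an array declared over the domain 1..10, a reference
	   a[i] folds the index to i - 1 here before the element address
	   is formed.  */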
	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return GEN_INT (TREE_STRING_POINTER (array)[i]);
	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_INT_CST_HIGH (index) == 0)
	      {
		tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

		i = TREE_INT_CST_LOW (index);
		while (elem && i--)
		  elem = TREE_CHAIN (elem);
		if (elem)
		  return expand_expr (fold (TREE_VALUE (elem)), target,
				      tmode, ro_modifier);
	      }
	  }
	else if (optimize >= 1
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		i = TREE_INT_CST_LOW (index);
		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem = CONSTRUCTOR_ELTS (init);

		    while (elem
			   && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
		      elem = TREE_CHAIN (elem);
		    if (elem)
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, ro_modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && TREE_INT_CST_HIGH (index) == 0
			 && (TREE_INT_CST_LOW (index)
			     < TREE_STRING_LENGTH (init)))
		  return (GEN_INT
			  (TREE_STRING_POINTER
			   (init)[TREE_INT_CST_LOW (index)]));
	      }
	  }
      }

      /* ... fall through ...  */
    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (op0, op1, target);
		      }
		    else
		      {
			enum machine_mode imode
			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
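
      /* As an illustration of the bitfield handling above: an unsigned
	 3-bit field is masked with (1 << 3) - 1 == 7, while a signed
	 3-bit field in a 32-bit SImode is shifted left 32 - 3 == 29 bits
	 and arithmetically back right, reproducing the sign extension
	 the constructor would have applied implicitly.  */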
      {
	enum machine_mode mode1;
	int bitsize;
	int bitpos;
	tree offset;
	int volatilep = 0;
	int alignment;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep,
					&alignment);

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */
	op0 = expand_expr (tem,
			   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				!= INTEGER_CST)
			    ? target : NULL_RTX),
			   VOIDmode,
			   modifier == EXPAND_INITIALIZER
			   ? modifier : EXPAND_NORMAL);
	/* If this is a constant, put it into a register if it is a
	   legitimate constant and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }
	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    if (GET_CODE (op0) != MEM)
	      abort ();

	    if (GET_MODE (offset_rtx) != ptr_mode)
	      {
#ifdef POINTERS_EXTEND_UNSIGNED
		offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	      }

	    /* A constant address in TO_RTX can have VOIDmode, we must not try
	       to call force_reg for that case.  Avoid that case.  */
	    if (GET_CODE (op0) == MEM
		&& GET_MODE (op0) == BLKmode
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
	      {
		rtx temp = change_address (op0, mode1,
					   plus_constant (XEXP (op0, 0),
							  (bitpos /
							   BITS_PER_UNIT)));
		if (GET_CODE (XEXP (temp, 0)) == REG)
		  op0 = temp;
		else
		  op0 = change_address (op0, mode1,
					force_reg (GET_MODE (XEXP (temp, 0)),
						   XEXP (temp, 0)));
		bitpos = 0;
	      }

	    op0 = change_address (op0, VOIDmode,
				  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
						force_reg (ptr_mode,
							   offset_rtx)));
	  }
	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }
	/* Check the access.  */
	if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		rtx to;
		int size;

		to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
		size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

		/* Check the access right of the pointer.  */
		if (size > BITS_PER_UNIT)
		  emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				     to, ptr_mode,
				     GEN_INT (size / BITS_PER_UNIT),
				     TYPE_MODE (sizetype),
				     GEN_INT (memory_usage),
				     TYPE_MODE (integer_type_node));
	      }
	  }
	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.
	   If we ultimately want the address (EXPAND_CONST_ADDRESS or
	   EXPAND_INITIALIZER), then we must not copy to a temporary.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& ((mode1 != BLKmode && ! direct_load[(int) mode1]
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
		    /* If the field isn't aligned enough to fetch as a memref,
		       fetch it as a bit field.  */
		    || (SLOW_UNALIGNED_ACCESS
			&& ((TYPE_ALIGN (TREE_TYPE (tem))
			     < (unsigned int) GET_MODE_ALIGNMENT (mode))
			    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode)
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = change_address (op0, VOIDmode,
				      plus_constant (XEXP (op0, 0),
						     bitpos / BITS_PER_UNIT));
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 1);

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), alignment);

	    op0 = extract_bit_field (op0, bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     alignment,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    if (mode == BLKmode)
	      {
		rtx new = assign_stack_temp (ext_mode,
					     bitsize / BITS_PER_UNIT, 0);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		MEM_SET_IN_STRUCT_P (op0, 1);
	      }

	    return op0;
	  }
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
						   (bitpos / BITS_PER_UNIT)));
	else
	  op0 = change_address (op0, mode1,
				plus_constant (XEXP (op0, 0),
					       (bitpos / BITS_PER_UNIT)));

	if (GET_CODE (op0) == MEM)
	  MEM_ALIAS_SET (op0) = get_alias_set (exp);

	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), alignment);

	MEM_SET_IN_STRUCT_P (op0, 1);
	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low % bits_per_word);
	       the_word  = set [(index - rlo) / bits_per_word];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
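
	/* Worked example, assuming bits_per_word == 8 and rlo == 0: for
	   index 11 this fetches byte 11 / 8 == 1 of the set and tests
	   bit 11 % 8 == 3 of it with the mask 1 << 3 == 8.  */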
	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;
	preexpand_calls (exp);

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if (((TREE_CODE (set_high_bound) == INTEGER_CST
	      && TREE_CODE (set_low_bound) == INTEGER_CST
	      && tree_int_cst_lt (set_high_bound, set_low_bound))
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  {
	    emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
				     GET_MODE (index_val), iunsignedp, 0, op1);
	  }

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  {
	    emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
				     GET_MODE (index_val), iunsignedp, 0, op1);
	  }

	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);

	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }
    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
	{
	  RTL_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 2) = 0;
	}
      return RTL_EXPR_RTL (exp);
    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (0);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	return expand_builtin (exp, target, subtarget, tmode, ignore);

      /* If this call was expanded already by preexpand_calls,
	 just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
	return CALL_EXPR_RTL (exp);

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
	  if (target == 0)
	    {
	      if (mode != BLKmode)
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			change_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
			 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, 1,
			 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     ro_modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	return
	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    plus_expr:
      this_optab = add_optab;
      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}
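
      /* For example, (i + 4) + fp becomes (fp + 4) + i here, so that
	 fp + 4 can fold to a single constant address once the frame
	 pointer is eliminated.  */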
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || mode == ptr_mode)
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 EXPAND_SUM);
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
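
      /* For example, adding op0 = (reg R) to op1 = (plus (symbol_ref S)
	 (const_int 12)) is reassociated to (plus (plus S R) 12), keeping
	 the constant term outermost where it can combine with other
	 constants.  */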
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
				 VOIDmode, ro_modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				 VOIDmode, ro_modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	{
	  tree negated = fold (build1 (NEGATE_EXPR, type,
				       TREE_OPERAND (exp, 1)));

	  /* Deal with the case where we can't negate the constant
	     in TYPE.  */
	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
	    {
	      tree newtype = signed_type (type);
	      tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
	      tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
	      tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));

	      if (! TREE_OVERFLOW (newneg))
		return expand_expr (convert (type,
					     build (PLUS_EXPR, newtype,
						    newop0, newneg)),
				    target, tmode, ro_modifier);
	    }
	  else
	    {
	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
	      goto plus_expr;
	    }
	}
      this_optab = sub_optab;
      goto binop;
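
      /* E.g. x - 5 is rewritten as x + (-5) and expanded through the
	 PLUS_EXPR path above; for an unsigned type the negation is done
	 in the corresponding signed type so that it cannot overflow.  */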
    case MULT_EXPR:
      preexpand_calls (exp);
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  register tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}
      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* Apply distributive law if OP0 is x+c.  */
	  if (GET_CODE (op0) == PLUS
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    return gen_rtx_PLUS (mode,
				 gen_rtx_MULT (mode, XEXP (op0, 0),
					       GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
				 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
					  * INTVAL (XEXP (op0, 1))));

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx_MULT (mode, op0,
			       GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
	}
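
      /* Applying the distributive law above: if OP0 expands to
	 (plus x 12) and the constant multiplier is 4, the result is
	 (plus (mult x 4) 48), a form usable directly as an indexed
	 address.  */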
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab
	    = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       ? smul_widen_optab : umul_widen_optab);
	  this_optab
	    = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       ? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
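
      /* For example, the product of two HImode values sign-extended to
	 SImode can use a mulhisi3 pattern directly, when the machine
	 provides one, instead of widening both operands and doing a
	 full SImode multiply.  */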
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;
      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	  emit_move_insn (target, op1);
	}
      else
	{
	  if (code == MAX_EXPR)
	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
		    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
		    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
	  else
	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
		    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
		    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
	  if (temp == const0_rtx)
	    emit_move_insn (target, op1);
	  else if (temp != const_true_rtx)
	    {
	      if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
		emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
	      else
		abort ();
	      emit_move_insn (target, op1);
	    }
	}
      emit_label (op0);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  if (temp != original_target)
	    temp = copy_to_reg (temp);

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, 0, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
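
      /* That is, for "foo != 0" the value of foo itself is loaded; if
	 it compares equal to zero the zero value falls through,
	 otherwise it is overwritten with 1, avoiding a separate
	 store-flag sequence.  */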
      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;
    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, ro_modifier);
    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
	       && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
		  && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (true),
					       TREE_OPERAND (exp, 0),
					       true, false)),
				target, tmode, modifier);
	}
      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     ro_modifier);
		return const0_rtx;
	      }

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }
	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
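
	/* E.g. for "x ? a + b : a" this sets singleton = a and
	   binary_op = a + b; for "x ? -a : a" it sets singleton = a and
	   unary_op = -a.  */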
	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && ! (GET_CODE (original_target) == MEM
		       && MEM_VOLATILE_P (original_target)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);
	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).

	       We have to invert the truth value here and then put it
	       back later if do_store_flag fails.  We cannot simply copy
	       TREE_OPERAND (exp, 0) to another variable and modify that
	       because invert_truthvalue can modify the tree pointed to
	       by its argument.  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));

	    result = do_store_flag (TREE_OPERAND (exp, 0),
				    (safe_from_p (temp, singleton, 1)
				     ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	    else if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));
	  }
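
	/* Concretely, with BRANCH_COST >= 3, "x ? a + 4 : a" is computed
	   as a + ((x != 0) << 2), since 4 == 1 << 2; for "x ? a : a + 4"
	   the condition is inverted first so the addition happens
	   exactly when x is zero.  */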
	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();
	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp, 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (temp != 0)
	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferral ();
	    if (temp != 0)
	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	  }

	emit_queue ();
	emit_label (op1);
	OK_DEFER_POP;

	return temp;
      }
    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which lays down in the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	if (target == 0)
	  {
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 0, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		DECL_RTL (slot) = target;
		if (TREE_ADDRESSABLE (slot))
		  {
		    TREE_ADDRESSABLE (slot) = 0;
		    mark_addressable (slot);
		  }

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		DECL_RTL (slot) = target;
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  {
		    TREE_ADDRESSABLE (slot) = 0;
		    mark_addressable (slot);
		  }
	      }
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	TREE_USED (slot) = 1;
	store_expr (exp1, target, 0);

	expand_decl_cleanup (NULL_TREE, cleanups);

	return target;
      }
    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
	  noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_VALUE (noncopied_parts),
			       TREE_PURPOSE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }
    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a call.
	   If lhs is simple, compute it first so we can give it as a
	   target if the rhs is just a call.  This avoids an extra temp and copy
	   and that prevents a partial-subsumption which makes bad code.
	   Actually we could treat component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = 0;

	if (TREE_CODE (lhs) != VAR_DECL
	    && TREE_CODE (lhs) != RESULT_DECL
	    && TREE_CODE (lhs) != PARM_DECL
	    && ! (TREE_CODE (lhs) == INDIRECT_REF
		  && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
	  preexpand_calls (exp);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
	    && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
	  noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_PURPOSE (noncopied_parts),
			       TREE_VALUE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }
7883 if (!TREE_OPERAND (exp
, 0))
7884 expand_null_return ();
7886 expand_return (TREE_OPERAND (exp
, 0));
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
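
/* Illustrative sketch (hypothetical user code, not part of the
   compiler): when the value of a post-increment is not used, it is
   expanded exactly like a pre-increment, which saves copying out the
   old value.  */
#if 0
static void
increment_example (int i, int *out)
{
  i++;			/* value ignored: expanded like ++i          */
  *out = i++;		/* value used: old value is copied out first */
}
#endif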
    case ADDR_EXPR:
      /* If nonzero, TEMP will be set to the address of something that might
	 be a MEM corresponding to a stack slot.  */
      temp = 0;

      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	  && ! TREE_STATIC (exp))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));

	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;
	  op0 = protect_from_queue (op0, 0);

	  /* We would like the object in memory.  If it is a constant,
	     we can have it be statically allocated into memory.  For
	     a non-constant (REG, SUBREG or CONCAT), we need to allocate some
	     memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (GET_CODE (op0) == MEM)
	    {
	      mark_temp_addr_taken (op0);
	      temp = XEXP (op0, 0);
	    }
	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	    {
	      /* If this object is in a register, it must not be BLKmode;
		 copy it into a temporary in memory.  */
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx memloc = assign_temp (inner_type, 1, 1, 1);

	      mark_temp_addr_taken (memloc);
	      emit_move_insn (memloc, op0);
	      op0 = memloc;
	    }

	  if (GET_CODE (op0) != MEM)
	    abort ();

	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      temp = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
		  && mode == ptr_mode)
		temp = convert_memory_address (ptr_mode, temp);
#endif
	      return temp;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr && GET_CODE (op0) != REG)
	op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);

      /* If we might have had a temp slot, add an equivalent address
	 for it.  */
      if (temp != 0)
	update_temp_slot_address (temp, op0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	  && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;
    case ENTRY_VALUE_EXPR:
      abort ();

    /* COMPLEX type for Extended Pascal & Fortran  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }
    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);
    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode, neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }
    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	expand_eh_region_end (handler);

	return op0;
      }
    case TRY_FINALLY_EXPR:
      {
	tree try_block = TREE_OPERAND (exp, 0);
	tree finally_block = TREE_OPERAND (exp, 1);
	rtx finally_label = gen_label_rtx ();
	rtx done_label = gen_label_rtx ();
	rtx return_link = gen_reg_rtx (Pmode);
	tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
			      (tree) finally_label, (tree) return_link);
	TREE_SIDE_EFFECTS (cleanup) = 1;

	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (0);

	target_temp_slot_level = temp_slot_level;

	expand_decl_cleanup (NULL_TREE, cleanup);
	op0 = expand_expr (try_block, target, tmode, modifier);

	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
	emit_jump (done_label);
	emit_label (finally_label);
	expand_expr (finally_block, const0_rtx, VOIDmode, 0);
	emit_indirect_jump (return_link);
	emit_label (done_label);
	return op0;
      }
    case GOTO_SUBROUTINE_EXPR:
      {
	rtx subr = (rtx) TREE_OPERAND (exp, 0);
	rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
	rtx return_address = gen_label_rtx ();

	emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
	emit_jump (subr);
	emit_label (return_address);
	return const0_rtx;
      }

    case POPDCC_EXPR:
      {
	rtx dcc = get_dynamic_cleanup_chain ();
	emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
	return const0_rtx;
      }

    case POPDHC_EXPR:
      {
	rtx dhc = get_dynamic_handler_chain ();
	emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
	return const0_rtx;
      }
    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }

  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  preexpand_calls (exp);
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
	    return align;

	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
		  & (max_align - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
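
/* Worked example (hypothetical values, not compiler code): for the
   expression `p + 2' where P points to a type with 64-bit alignment,
   the PLUS_EXPR case above halves MAX_ALIGN until the byte offset is
   a multiple of it: 2 bytes = 16 bits, so 64 -> 32 -> 16, and the
   result is MIN (64, 16) = 16 bits.  */
#if 0
static void
pointer_alignment_example (void)
{
  static long x __attribute__ ((aligned (8)));	/* 64-bit aligned      */
  char *p = (char *) &x + 2;			/* only 16-bit aligned */
  (void) p;
}
#endif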
/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */

static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = integer_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = arg1;
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = arg0;
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
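
/* Illustrative sketch (hypothetical user code): the source expression
   "hello" + 2 reaches string_constant as a PLUS_EXPR whose first
   operand is an ADDR_EXPR of the STRING_CST "hello"; the STRING_CST is
   returned and *PTR_OFFSET is set to the tree for 2.  */
#if 0
static const char *
string_constant_example (void)
{
  return "hello" + 2;		/* points at "llo" within the literal */
}
#endif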
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;
  int i;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  */
      /* This would perhaps not be valid if we were dealing with named
	 arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
	return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}
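
/* Worked example (hypothetical user code): for "foo\0bar" the array
   size (TREE_STRING_LENGTH) is 8, but the C string length is 3.  With
   a known offset of 4 the result is strlen (ptr + 4) == 3; with an
   unknown offset, the internal zero byte forces a runtime strlen.  */
#if 0
#include <string.h>
static int
c_strlen_example (int unknown)
{
  static const char s[] = "foo\0bar";
  return strlen (s + 4)		/* folded to 3 at compile time */
	 + strlen (s + unknown);	/* left as a runtime call */
}
#endif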
rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
#endif
  return tem;
}
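
/* Illustrative sketch (hypothetical user code): a COUNT of 0 asks for
   the current frame or return address; each additional level follows
   the dynamic chain one frame up, as in the loop above.  Whether
   nonzero counts work at all is target-dependent.  */
#if 0
static void *
caller_of_my_caller (void)
{
  return __builtin_return_address (1);	/* one frame up the chain */
}
#endif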
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

rtx
expand_builtin_setjmp (buf_addr, target, first_label, next_label)
     rtx buf_addr;
     rtx target;
     rtx first_label, next_label;
{
  rtx lab1 = gen_label_rtx ();
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  enum machine_mode value_mode;
  rtx stack_save;

  value_mode = TYPE_MODE (integer_type_node);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, buf_addr);

  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (value_mode);

  emit_queue ();

  /* We store the frame pointer and the address of lab1 in the buffer
     and use the rest of it for the stack save area, which is
     machine-dependent.  */

#ifndef BUILTIN_SETJMP_FRAME_VALUE
#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
#endif

  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
		  BUILTIN_SETJMP_FRAME_VALUE);
  emit_move_insn (validize_mem
		  (gen_rtx_MEM (Pmode,
				plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)))),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, lab1)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Set TARGET to zero and branch to the first-time-through label.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (first_label));
  emit_barrier ();
  emit_label (lab1);

  /* Tell flow about the strange goings on.  Putting `lab1' on
     `nonlocal_goto_handler_labels' indicates that function
     calls may traverse the arc back to this label.  */

  current_function_has_nonlocal_label = 1;
  nonlocal_goto_handler_labels =
    gen_rtx_EXPR_LIST (VOIDmode, lab1, nonlocal_goto_handler_labels);

  /* We clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      int i;
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == sizeof elim_regs / sizeof elim_regs[0])
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.
	     If space hasn't been allocated for it yet, make
	     some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (arg_pointer_save_area));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (lab1));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      {
	; /* Nothing */
      }

  /* Set TARGET, and branch to the next-time-through label.  */
  emit_move_insn (target, const1_rtx);
  emit_jump_insn (gen_jump (next_label));

  return target;
}
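
/* Buffer layout recap (illustrative only; field sizes assume a Pmode
   the width of a pointer): word 0 holds the frame value, word 1 the
   receiver label, and words 2..4 the machine-dependent stack save
   area, matching the plus_constant offsets used above.  */
#if 0
struct builtin_setjmp_buf_layout
{
  void *frame;			/* buf[0]: BUILTIN_SETJMP_FRAME_VALUE */
  void *receiver_label;		/* buf[1]: address of lab1            */
  void *stack_save[3];		/* buf[2..4]: SAVE_NONLOCAL area      */
};
#endif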
void
expand_builtin_longjmp (buf_addr, value)
     rtx buf_addr, value;
{
  rtx fp, lab, stack;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif
  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  if (value != const1_rtx)
    abort ();

#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }
}
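
/* Illustrative user-level pairing (hypothetical code): the second
   argument of __builtin_longjmp must be the literal 1, which is what
   the second return from __builtin_setjmp produces.  */
#if 0
static void *jmpbuf[5];

static void
longjmp_example (void)
{
  if (__builtin_setjmp (jmpbuf) == 0)
    __builtin_longjmp (jmpbuf, 1);	/* never returns */
}
#endif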
static rtx
get_memory_rtx (exp)
     tree exp;
{
  rtx mem;
  int is_aggregate;

  mem = gen_rtx_MEM (BLKmode,
		     memory_address (BLKmode,
				     expand_expr (exp, NULL_RTX,
						  ptr_mode, EXPAND_SUM)));

  RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);

  /* Figure out the type of the object pointed to.  Set MEM_IN_STRUCT_P
     if the value is the address of a structure or if the expression is
     cast to a pointer to structure type.  */
  is_aggregate = 0;

  while (TREE_CODE (exp) == NOP_EXPR)
    {
      tree cast_type = TREE_TYPE (exp);
      if (TREE_CODE (cast_type) == POINTER_TYPE
	  && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
	{
	  is_aggregate = 1;
	  break;
	}
      exp = TREE_OPERAND (exp, 0);
    }

  if (is_aggregate == 0)
    {
      tree type;

      if (TREE_CODE (exp) == ADDR_EXPR)
	/* If this is the address of an object, check whether the
	   object is an array.  */
	type = TREE_TYPE (TREE_OPERAND (exp, 0));
      else
	type = TREE_TYPE (TREE_TYPE (exp));
      is_aggregate = AGGREGATE_TYPE_P (type);
    }

  MEM_SET_IN_STRUCT_P (mem, is_aggregate);
  return mem;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

#define CALLED_AS_BUILT_IN(NODE) \
   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
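
/* Illustrative note (hypothetical user code): CALLED_AS_BUILT_IN is
   true only for the "__builtin_" spelling, so per the !optimize checks
   below, an explicit __builtin_strlen call is open-coded even at -O0,
   while a plain strlen call is only open-coded when optimizing.  */
#if 0
static int
called_as_built_in_example (void)
{
  return __builtin_strlen ("abc");	/* expanded inline even at -O0 */
}
#endif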
static rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  rtx op0;
  rtx lab1, insns;
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
  optab builtin_optab;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
      /* Treat these like sqrt, but only if the user asks for them.  */
      if (! flag_fast_math)
	break;
    case BUILT_IN_FSQRT:
      /* If not optimizing, call the library function.  */
      if (! optimize)
	break;

      if (arglist == 0
	  /* Arg could be wrong type if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
	break;

      /* Stabilize and compute the argument.  */
      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
	  && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
	{
	  exp = copy_node (exp);
	  arglist = copy_node (arglist);
	  TREE_OPERAND (exp, 1) = arglist;
	  TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
	}
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

      /* Make a suitable register to place result in.  */
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      emit_queue ();
      start_sequence ();

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_SIN:
	  builtin_optab = sin_optab; break;
	case BUILT_IN_COS:
	  builtin_optab = cos_optab; break;
	case BUILT_IN_FSQRT:
	  builtin_optab = sqrt_optab; break;
	default:
	  abort ();
	}

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    builtin_optab, op0, target, 0);

      /* If we were unable to expand via the builtin, stop the
	 sequence (without outputting the insns) and break, causing
	 a call to the library function.  */
      if (target == 0)
	{
	  end_sequence ();
	  break;
	}

      /* Check the results by default.  But if flag_fast_math is turned on,
	 then assume sqrt will always be called with valid arguments.  */
      if (! flag_fast_math)
	{
	  /* Don't define the builtin FP instructions
	     if your machine is not IEEE.  */
	  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	    abort ();

	  lab1 = gen_label_rtx ();

	  /* Test the result; if it is NaN, set errno=EDOM because
	     the argument was not in the domain.  */
	  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
				   0, 0, lab1);

	  {
#ifdef GEN_ERRNO_RTX
	    rtx errno_rtx = GEN_ERRNO_RTX;
#else
	    rtx errno_rtx
	      = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif

	    emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
	  }

	  /* We can't set errno=EDOM directly; let the library call do it.
	     Pop the arguments right away in case the call gets deleted.  */
	  NO_DEFER_POP;
	  expand_call (exp, target, 0);
	  OK_DEFER_POP;
	  emit_label (lab1);
	}

      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insns (insns);

      return target;
    /* __builtin_apply_args returns block of memory allocated on
       the stack into which is stored the arg pointer, structure
       value address, static chain, and all the registers that might
       possibly be used in performing a function call.  The code is
       moved to the start of the function so the incoming values are
       saved.  */
    case BUILT_IN_APPLY_ARGS:
      /* Don't do __builtin_apply_args more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (apply_args_value != 0)
	return apply_args_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	start_sequence ();
	temp = expand_builtin_apply_args ();
	seq = get_insns ();
	end_sequence ();

	apply_args_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }
    /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
       FUNCTION with a copy of the parameters described by
       ARGUMENTS, and ARGSIZE.  It returns a block of memory
       allocated on the stack into which is stored all the registers
       that might possibly be used for returning the result of a
       function.  ARGUMENTS is the value returned by
       __builtin_apply_args.  ARGSIZE is the number of bytes of
       arguments that must be copied.  ??? How should this value be
       computed?  We'll also need a safe worst case value for varargs
       functions.  */
    case BUILT_IN_APPLY:
      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	return const0_rtx;
      else
	{
	  int i;
	  tree t;
	  rtx ops[3];

	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
	    ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}
    /* __builtin_return (RESULT) causes the function to return the
       value described by RESULT.  RESULT is address of the block of
       memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (arglist
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
	expand_builtin_return (expand_expr (TREE_VALUE (arglist),
					    NULL_RTX, VOIDmode, 0));
      return const0_rtx;
    case BUILT_IN_SAVEREGS:
      /* Don't do __builtin_saveregs more than once in a function.
	 Save the result of the first call and reuse it.  */
      if (saveregs_value != 0)
	return saveregs_value;
      {
	/* When this function is called, it means that registers must be
	   saved on entry to this function.  So we migrate the
	   call to the first insn of this function.  */
	rtx temp;
	rtx seq;

	/* Now really call the function.  `expand_call' does not call
	   expand_builtin, so there is no danger of infinite recursion here.  */

	start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
	/* Do whatever the machine needs done in this case.  */
	temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
	/* The register where the function returns its value
	   is likely to have something else in it, such as an argument.
	   So preserve that register around the call.  */

	if (value_mode != VOIDmode)
	  {
	    rtx valreg = hard_libcall_value (value_mode);
	    rtx saved_valreg = gen_reg_rtx (value_mode);

	    emit_move_insn (saved_valreg, valreg);
	    temp = expand_call (exp, target, ignore);
	    emit_move_insn (valreg, saved_valreg);
	  }
	else
	  /* Generate the call, putting the value in a pseudo.  */
	  temp = expand_call (exp, target, ignore);
#endif

	seq = get_insns ();
	end_sequence ();

	saveregs_value = temp;

	/* Put the sequence after the NOTE that starts the function.
	   If this is inside a SEQUENCE, make the outer-level insn
	   chain current, so the code is placed at the start of the
	   function.  */
	push_topmost_sequence ();
	emit_insns_before (seq, NEXT_INSN (get_insns ()));
	pop_topmost_sequence ();
	return temp;
      }
    /* __builtin_args_info (N) returns word N of the arg space info
       for the current function.  The number and meanings of words
       is controlled by the definition of CUMULATIVE_ARGS.  */
    case BUILT_IN_ARGS_INFO:
      {
	int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
	int *word_ptr = (int *) &current_function_args_info;
#if 0
	/* These are used by the code below that is if 0'ed away.  */
	int i;
	tree type, elts, result;
#endif

	if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
	  fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
		 __FILE__, __LINE__);

	if (arglist != 0)
	  {
	    tree arg = TREE_VALUE (arglist);
	    if (TREE_CODE (arg) != INTEGER_CST)
	      error ("argument of `__builtin_args_info' must be constant");
	    else
	      {
		int wordnum = TREE_INT_CST_LOW (arg);

		if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
		  error ("argument of `__builtin_args_info' out of range");
		else
		  return GEN_INT (word_ptr[wordnum]);
	      }
	  }
	else
	  error ("missing argument in `__builtin_args_info'");

	return const0_rtx;

#if 0
	for (i = 0; i < nwords; i++)
	  elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));

	type = build_array_type (integer_type_node,
				 build_index_type (build_int_2 (nwords, 0)));
	result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
	TREE_CONSTANT (result) = 1;
	TREE_STATIC (result) = 1;
	result = build (INDIRECT_REF, build_pointer_type (type), result);
	TREE_CONSTANT (result) = 1;
	return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
#endif
      }
    /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      {
	tree fntype = TREE_TYPE (current_function_decl);

	if ((TYPE_ARG_TYPES (fntype) == 0
	     || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
		 == void_type_node))
	    && ! current_function_varargs)
	  {
	    error ("`va_start' used in function with fixed args");
	    return const0_rtx;
	  }

	if (arglist)
	  {
	    tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
	    tree arg = TREE_VALUE (arglist);

	    /* Strip off all nops for the sake of the comparison.  This
	       is not quite the same as STRIP_NOPS.  It does more.
	       We must also strip off INDIRECT_REF for C++ reference
	       parameters.  */
	    while (TREE_CODE (arg) == NOP_EXPR
		   || TREE_CODE (arg) == CONVERT_EXPR
		   || TREE_CODE (arg) == NON_LVALUE_EXPR
		   || TREE_CODE (arg) == INDIRECT_REF)
	      arg = TREE_OPERAND (arg, 0);
	    if (arg != last_parm)
	      warning ("second parameter of `va_start' not last named argument");
	  }
	else if (! current_function_varargs)
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning ("`__builtin_next_arg' called without an argument");
      }

      return expand_binop (Pmode, add_optab,
			   current_function_internal_arg_pointer,
			   current_function_arg_offset_rtx,
			   NULL_RTX, 0, OPTAB_LIB_WIDEN);
    case BUILT_IN_CLASSIFY_TYPE:
      if (arglist != 0)
	{
	  tree type = TREE_TYPE (TREE_VALUE (arglist));
	  enum tree_code code = TREE_CODE (type);
	  if (code == VOID_TYPE)
	    return GEN_INT (void_type_class);
	  if (code == INTEGER_TYPE)
	    return GEN_INT (integer_type_class);
	  if (code == CHAR_TYPE)
	    return GEN_INT (char_type_class);
	  if (code == ENUMERAL_TYPE)
	    return GEN_INT (enumeral_type_class);
	  if (code == BOOLEAN_TYPE)
	    return GEN_INT (boolean_type_class);
	  if (code == POINTER_TYPE)
	    return GEN_INT (pointer_type_class);
	  if (code == REFERENCE_TYPE)
	    return GEN_INT (reference_type_class);
	  if (code == OFFSET_TYPE)
	    return GEN_INT (offset_type_class);
	  if (code == REAL_TYPE)
	    return GEN_INT (real_type_class);
	  if (code == COMPLEX_TYPE)
	    return GEN_INT (complex_type_class);
	  if (code == FUNCTION_TYPE)
	    return GEN_INT (function_type_class);
	  if (code == METHOD_TYPE)
	    return GEN_INT (method_type_class);
	  if (code == RECORD_TYPE)
	    return GEN_INT (record_type_class);
	  if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
	    return GEN_INT (union_type_class);
	  if (code == ARRAY_TYPE)
	    {
	      if (TYPE_STRING_FLAG (type))
		return GEN_INT (string_type_class);
	      else
		return GEN_INT (array_type_class);
	    }
	  if (code == SET_TYPE)
	    return GEN_INT (set_type_class);
	  if (code == FILE_TYPE)
	    return GEN_INT (file_type_class);
	  if (code == LANG_TYPE)
	    return GEN_INT (lang_type_class);
	}
      return GEN_INT (no_type_class);
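
/* Illustrative sketch (hypothetical user code): the class numbers
   returned above come from enum type_class in typeclass.h; e.g. a
   double argument classifies as real_type_class.  */
#if 0
static int
classify_type_example (double d)
{
  return __builtin_classify_type (d);	/* == real_type_class */
}
#endif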
    case BUILT_IN_CONSTANT_P:
      if (arglist == 0)
	return const0_rtx;
      else
	{
	  tree arg = TREE_VALUE (arglist);
	  rtx tmp;

	  /* We return 1 for a numeric type that's known to be a constant
	     value at compile-time or for an aggregate type that's a
	     literal constant.  */
	  STRIP_NOPS (arg);

	  /* If we know this is a constant, emit the constant of one.  */
	  if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
	      || (TREE_CODE (arg) == CONSTRUCTOR
		  && TREE_CONSTANT (arg))
	      || (TREE_CODE (arg) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
	    return const1_rtx;

	  /* If we aren't going to be running CSE or this expression
	     has side effects, show we don't know it to be a constant.
	     Likewise if it's a pointer or aggregate type since in those
	     cases we only want literals, since those are only optimized
	     when generating RTL, not later.  */
	  if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
	      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (TREE_TYPE (arg)))
	    return const0_rtx;

	  /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
	     chance to see if it can deduce whether ARG is constant.  */

	  tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
	  tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
	  return tmp;
	}
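
/* Illustrative sketch (hypothetical user code): literal operands fold
   to 1 immediately; a value that only CSE can prove constant is left
   as a CONSTANT_P_RTX for CSE to resolve later.  */
#if 0
static int
constant_p_example (int x)
{
  int a = __builtin_constant_p (42);	/* 1 at expansion time */
  int b = __builtin_constant_p (x);	/* deferred to CSE     */
  return a + b;
}
#endif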
    case BUILT_IN_FRAME_ADDRESS:
      /* The argument must be a nonnegative integer constant.
	 It counts the number of frames to scan up the stack.
	 The value is the address of that frame.  */
    case BUILT_IN_RETURN_ADDRESS:
      /* The argument must be a nonnegative integer constant.
	 It counts the number of frames to scan up the stack.
	 The value is the return address saved in that frame.  */
      if (arglist == 0)
	/* Warning about missing arg was already issued.  */
	return const0_rtx;
      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
	       || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    error ("invalid arg to `__builtin_frame_address'");
	  else
	    error ("invalid arg to `__builtin_return_address'");
	  return const0_rtx;
	}
      else
	{
	  rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
						TREE_INT_CST_LOW (TREE_VALUE (arglist)),
						hard_frame_pointer_rtx);

	  /* Some ports cannot access arbitrary stack frames.  */
	  if (tem == NULL)
	    {
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
		warning ("unsupported arg to `__builtin_frame_address'");
	      else
		warning ("unsupported arg to `__builtin_return_address'");
	      return const0_rtx;
	    }

	  /* For __builtin_frame_address, return what we've got.  */
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    return tem;

	  if (GET_CODE (tem) != REG
	      && ! CONSTANT_P (tem))
	    tem = copy_to_mode_reg (Pmode, tem);
	  return tem;
	}
    /* Returns the address of the area where the structure is returned.
       0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (arglist != 0
	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
	  || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
	return const0_rtx;
      else
	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
    case BUILT_IN_ALLOCA:
      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

      /* Allocate the desired space.  */
      return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
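
/* Illustrative sketch (hypothetical user code): the size operand is
   expanded and handed to allocate_dynamic_stack_space, so the call
   becomes an inline stack-pointer adjustment rather than a call.  */
#if 0
static void
alloca_example (int n)
{
  char *p = __builtin_alloca (n);	/* open-coded stack allocation */
  p[0] = 0;
}
#endif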
    case BUILT_IN_FFS:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-integer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
	break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
      /* Compute ffs, into TARGET if possible.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    ffs_optab, op0, target, 1);
      if (target == 0)
	abort ();
      return target;
    case BUILT_IN_STRLEN:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  tree src = TREE_VALUE (arglist);
	  tree len = c_strlen (src);

	  int align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

	  rtx result, src_rtx, char_rtx;
	  enum machine_mode insn_mode = value_mode, char_mode;
	  enum insn_code icode;

	  /* If the length is known, just return it.  */
	  if (len != 0)
	    return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);

	  /* If SRC is not a pointer type, don't do this operation inline.  */
	  if (align == 0)
	    break;

	  /* Call a function if we can't compute strlen in the right mode.  */

	  while (insn_mode != VOIDmode)
	    {
	      icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	      if (icode != CODE_FOR_nothing)
		break;

	      insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	    }
	  if (insn_mode == VOIDmode)
	    break;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && GET_CODE (result) == REG
		 && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  /* Make sure the operands are acceptable to the predicates.  */

	  if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
	    result = gen_reg_rtx (insn_mode);
	  src_rtx = memory_address (BLKmode,
				    expand_expr (src, NULL_RTX, ptr_mode,
						 EXPAND_NORMAL));

	  if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
	    src_rtx = copy_to_mode_reg (Pmode, src_rtx);

	  /* Check the string is readable and has an end.  */
	  if (current_function_check_memory_usage)
	    emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
			       src_rtx, ptr_mode,
			       GEN_INT (MEMORY_USE_RO),
			       TYPE_MODE (integer_type_node));

	  char_rtx = const0_rtx;
	  char_mode = insn_operand_mode[(int)icode][2];
	  if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
	    char_rtx = copy_to_mode_reg (char_mode, char_rtx);

	  emit_insn (GEN_FCN (icode) (result,
				      gen_rtx_MEM (BLKmode, src_rtx),
				      char_rtx, GEN_INT (align)));

	  /* Return the value in the proper mode for this function.  */
	  if (GET_MODE (result) == value_mode)
	    return result;
	  else if (target != 0)
	    {
	      convert_move (target, result, 0);
	      return target;
	    }
	  else
	    return convert_to_mode (value_mode, result, 0);
	}
    case BUILT_IN_STRCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	break;
      else
	{
	  tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

	  if (len == 0)
	    break;

	  len = size_binop (PLUS_EXPR, len, integer_one_node);

	  chainon (arglist, build_tree_list (NULL_TREE, len));
	}

      /* Drops in.  */
    case BUILT_IN_MEMCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	      != POINTER_TYPE)
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || (TREE_CODE (TREE_TYPE (TREE_VALUE
				    (TREE_CHAIN (TREE_CHAIN (arglist)))))
	      != INTEGER_TYPE))
	break;
      else
	{
	  tree dest = TREE_VALUE (arglist);
	  tree src = TREE_VALUE (TREE_CHAIN (arglist));
	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	  int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  rtx dest_mem, src_mem, dest_addr, len_rtx;

	  /* If either SRC or DEST is not a pointer type, don't do
	     this operation in-line.  */
	  if (src_align == 0 || dest_align == 0)
	    {
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
		TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
	      break;
	    }

	  dest_mem = get_memory_rtx (dest);
	  src_mem = get_memory_rtx (src);
	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);

	  /* Just copy the rights of SRC to the rights of DEST.  */
	  if (current_function_check_memory_usage)
	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			       XEXP (dest_mem, 0), ptr_mode,
			       XEXP (src_mem, 0), ptr_mode,
			       len_rtx, TYPE_MODE (sizetype));

	  /* Copy word part most expediently.  */
	  dest_addr
	    = emit_block_move (dest_mem, src_mem, len_rtx,
			       MIN (src_align, dest_align));

	  if (dest_addr == 0)
	    dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);

	  return dest_addr;
	}
    case BUILT_IN_MEMSET:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	      != INTEGER_TYPE)
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || (INTEGER_TYPE
	      != (TREE_CODE (TREE_TYPE
			     (TREE_VALUE
			      (TREE_CHAIN (TREE_CHAIN (arglist))))))))
	break;
      else
	{
	  tree dest = TREE_VALUE (arglist);
	  tree val = TREE_VALUE (TREE_CHAIN (arglist));
	  tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	  int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	  rtx dest_mem, dest_addr, len_rtx;

	  /* If DEST is not a pointer type, don't do this
	     operation in-line.  */
	  if (dest_align == 0)
	    break;

	  /* If the arguments have side-effects, then we can only evaluate
	     them at most once.  The following code evaluates them twice if
	     they are not constants because we break out to expand_call
	     in that case.  They can't be constants if they have side-effects
	     so we can check for that first.  Alternatively, we could call
	     save_expr to make multiple evaluation safe.  */
	  if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
	    break;

	  /* If VAL is not 0, don't do this operation in-line.  */
	  if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
	    break;

	  /* If LEN does not expand to a constant, don't do this
	     operation in-line.  */
	  len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
	  if (GET_CODE (len_rtx) != CONST_INT)
	    break;

	  dest_mem = get_memory_rtx (dest);

	  /* Just check DST is writable and mark it as readable.  */
	  if (current_function_check_memory_usage)
	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       XEXP (dest_mem, 0), ptr_mode,
			       len_rtx, TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_WO),
			       TYPE_MODE (integer_type_node));

	  dest_addr = clear_storage (dest_mem, len_rtx, dest_align);

	  if (dest_addr == 0)
	    dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);

	  return dest_addr;
	}
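
/* Illustrative sketch (hypothetical user code): per the checks above,
   only a zero fill with a constant length is open-coded via
   clear_storage; anything else falls through to the library call.  */
#if 0
#include <string.h>
static void
memset_example (char *buf, int c, unsigned n)
{
  memset (buf, 0, 16);	/* inlined: clear_storage            */
  memset (buf, c, n);	/* library call: VAL and LEN unknown */
}
#endif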
  /* These comparison functions need an instruction that returns an actual
     index.  An ordinary compare that just sets the condition codes
     is not enough.  */
#ifdef HAVE_cmpstrsi
    case BUILT_IN_STRCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      /* If we need to check memory accesses, call the library function.  */
      if (current_function_check_memory_usage)
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
	break;
      else if (!HAVE_cmpstrsi)
	break;
      {
	tree arg1 = TREE_VALUE (arglist);
	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
	tree len, len2;

	len = c_strlen (arg1);
	if (len)
	  len = size_binop (PLUS_EXPR, integer_one_node, len);
	len2 = c_strlen (arg2);
	if (len2)
	  len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

	/* If we don't have a constant length for the first, use the length
	   of the second, if we know it.  We don't require a constant for
	   this case; some cost analysis could be done if both are available
	   but neither is constant.  For now, assume they're equally cheap.

	   If both strings have constant lengths, use the smaller.  This
	   could arise if optimization results in strcmp being called with
	   two fixed strings, or if the code was machine-generated.  We should
	   add some code to the `memcmp' handler below to deal with such
	   situations, someday.  */
	if (!len || TREE_CODE (len) != INTEGER_CST)
	  {
	    if (len2)
	      len = len2;
	    else
	      break;
	  }
	else if (len2 && TREE_CODE (len2) == INTEGER_CST)
	  {
	    if (tree_int_cst_lt (len2, len))
	      len = len2;
	  }

	chainon (arglist, build_tree_list (NULL_TREE, len));
      }

      /* Drops in.  */
    case BUILT_IN_MEMCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
	break;

      /* If we need to check memory accesses, call the library function.  */
      if (current_function_check_memory_usage)
	break;

      if (arglist == 0
	  /* Arg could be non-pointer if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
	  || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
	  || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
	break;
      else if (!HAVE_cmpstrsi)
	break;
      {
	tree arg1 = TREE_VALUE (arglist);
	tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
	tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	rtx result;
	int arg1_align
	  = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	int arg2_align
	  = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
	enum machine_mode insn_mode
	  = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];

	/* If we don't have POINTER_TYPE, call the function.  */
	if (arg1_align == 0 || arg2_align == 0)
	  {
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
	      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
	    break;
	  }

	/* Make a place to write the result of the instruction.  */
	result = target;
	if (! (result != 0
	       && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	       && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	  result = gen_reg_rtx (insn_mode);

	emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
				 get_memory_rtx (arg2),
				 expand_expr (len, NULL_RTX, VOIDmode, 0),
				 GEN_INT (MIN (arg1_align, arg2_align))));

	/* Return the value in the proper mode for this function.  */
	mode = TYPE_MODE (TREE_TYPE (exp));
	if (GET_MODE (result) == mode)
	  return result;
	else if (target != 0)
	  {
	    convert_move (target, result, 0);
	    return target;
	  }
	else
	  return convert_to_mode (mode, result, 0);
      }
#else
    case BUILT_IN_STRCMP:
    case BUILT_IN_MEMCMP:
      break;
#endif
    case BUILT_IN_SETJMP:
      if (arglist == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
				      VOIDmode, 0);
	  rtx lab = gen_label_rtx ();
	  rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
	  emit_label (lab);
	  return ret;
	}

    /* __builtin_longjmp is passed a pointer to an array of five words.
       It's similar to the C library longjmp function but works with
       __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (arglist == 0 || TREE_CHAIN (arglist) == 0
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
	break;
      else
	{
	  rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
				      VOIDmode, 0);
	  rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
				   NULL_RTX, VOIDmode, 0);

	  if (value != const1_rtx)
	    {
	      error ("__builtin_longjmp second argument must be 1");
	      return const0_rtx;
	    }

	  expand_builtin_longjmp (buf_addr, value);
	  return const0_rtx;
	}
    case BUILT_IN_TRAP:
#ifdef HAVE_trap
      if (HAVE_trap)
	emit_insn (gen_trap ());
      else
#endif
	error ("__builtin_trap not supported by this target");
      break;
    /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_FP_REGNUM:
      return expand_builtin_dwarf_fp_regnum ();
    case BUILT_IN_DWARF_REG_SIZE:
      return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (TREE_VALUE (arglist),
				TREE_VALUE (TREE_CHAIN (arglist)),
				TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
      return const0_rtx;
    default:			/* just do library call, if unknown builtin */
      error ("built-in function `%s' not currently supported",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
/* Return the offset of register REGNO into the block returned by
   __builtin_apply_args.  This is not declared static, since it is
   needed in objc-act.c.  */

int
apply_args_register_offset (regno)
     int regno;
{
  apply_args_size ();

  /* Arguments are always put in outgoing registers (in the argument
     block) if such make sense.  */
#ifdef OUTGOING_REGNO
  regno = OUTGOING_REGNO(regno);
#endif
  return apply_args_reg_offset[regno];
}
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (struct_value_rtx)
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode)
		  && HARD_REGNO_NREGS (regno, mode) == 1)
		best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}
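
/* Worked example (illustrative numbers only; assumes a 4-byte Pmode
   and two argument registers whose mode needs 8-byte alignment): size
   starts at 4 for the arg pointer, the alignment rounding above bumps
   it to CEIL (4, 8) * 8 = 8, the first register occupies bytes 8..15,
   the second bytes 16..23, and apply_args_size () returns 24.  */
#if 0
static int
apply_args_size_example (void)
{
  int size = 4;				/* incoming arg-pointer      */
  int align = 8;			/* register mode alignment   */
  size = CEIL (size, align) * align;	/* -> 8                      */
  size += 8;				/* first register:   8..15   */
  size += 8;				/* second register: 16..23   */
  return size;				/* 24                        */
}
#endif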
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode))
		best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = change_address (result, mode,
			      plus_constant (XEXP (result, 0), size));
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        rtx tem;

        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

#ifdef STACK_REGS
        /* For reg-stack.c's stack register housekeeping.
           Compare with a similar piece of code in function.c.  */

        emit_insn (gen_rtx_USE (mode, tem));
#endif

        emit_move_insn (change_address (registers, mode,
                                        plus_constant (XEXP (registers, 0),
                                                       size)),
                        tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
                  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
                                      plus_constant (XEXP (registers, 0),
                                                     size)),
                      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
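
/* Layout of the block built above, for reference (the same offsets and
   alignment rules are recomputed, in the same order, when the block is
   read back by expand_builtin_apply):

       offset 0:              the incoming argument pointer;
       next Pmode slot:       the structure value address, if any;
       then:                  each register in apply_args_mode, aligned
                              to its mode.  */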
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
                  gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
                                incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros affect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, 0);
  emit_block_move (gen_rtx_MEM (BLKmode, dest),
                   gen_rtx_MEM (BLKmode, incoming_args),
                   argsize,
                   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg,
                        change_address (arguments, mode,
                                        plus_constant (XEXP (arguments, 0),
                                                       size)));

        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
                      change_address (arguments, Pmode,
                                      plus_constant (XEXP (arguments, 0),
                                                     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
        use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            if (valreg)
              abort (); /* HAVE_untyped_call required.  */
            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (gen_call_value (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
                                      XEXP (result, 0)),
                      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
/* Perform an untyped return.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg,
                        change_address (result, mode,
                                        plus_constant (XEXP (result, 0),
                                                       size)));

        push_to_sequence (call_fusage);
        emit_insn (gen_rtx_USE (VOIDmode, reg));
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
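
/* A user-level sketch of how the three builtins above cooperate
   (illustrative only; target_fn and the argument-block size 64 are
   made-up, cf. the ??? comment about adjusting argsize in
   expand_builtin_apply):

       void forwarder ()
       {
         void *args = __builtin_apply_args ();
         void *result = __builtin_apply ((void (*)()) target_fn, args, 64);
         __builtin_return (result);
       }

   expand_builtin_apply_args expands the first call, expand_builtin_apply
   the second, and expand_builtin_return the third.  */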
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     register tree exp;
     int post, ignore;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
          && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
              || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
        SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
        bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
           && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
         post-incrementing, get a copy of the old value.  Otherwise,
         just mark that we cannot increment in place.  */
      if (post)
        op0 = copy_to_reg (op0);
      else
        bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
                 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
                     EXPAND_MEMORY_USE_BAD);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_operand_predicate[icode][0]) (op0, mode)
          && (*insn_operand_predicate[icode][1]) (op0, mode)
          && (*insn_operand_predicate[icode][2]) (op1, mode))
        single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
         Problems with multiple evaluation of INCREMENTED are prevented
         because either (1) it is a component_ref or preincrement,
         in which case it was stabilized above, or (2) it is an array_ref
         with constant index in an array in a register, which is
         safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
                             || TREE_CODE (exp) == PREDECREMENT_EXPR)
                            ? MINUS_EXPR : PLUS_EXPR),
                           TREE_TYPE (exp),
                           incremented,
                           TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
             || TREE_CODE (incremented) == CONVERT_EXPR)
        {
          newexp = convert (TREE_TYPE (incremented), newexp);
          incremented = TREE_OPERAND (incremented, 0);
        }

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
         If there is an insn to add or subtract in this mode, queue it.
         Queueing the increment insn avoids the register shuffling
         that often results if we must increment now and first save
         the old value for subsequent use.  */

#if 0 /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_operand_predicate[icode][0]) (op0, mode)
          && (*insn_operand_predicate[icode][1]) (op0, mode))
        {
          if (! (*insn_operand_predicate[icode][2]) (op1, mode))
            op1 = force_reg (mode, op1);

          return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
        }
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
        {
          rtx addr = (general_operand (XEXP (op0, 0), mode)
                      ? force_reg (Pmode, XEXP (op0, 0))
                      : copy_to_reg (XEXP (op0, 0)));
          rtx temp, result;

          op0 = change_address (op0, VOIDmode, addr);
          temp = force_reg (GET_MODE (op0), op0);
          if (! (*insn_operand_predicate[icode][2]) (op1, mode))
            op1 = force_reg (mode, op1);

          /* The increment queue is LIFO, thus we have to `queue'
             the instructions in reverse order.  */
          enqueue_insn (op0, gen_move_insn (op0, temp));
          result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
          return result;
        }
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1,
                      current_function_check_memory_usage ? NULL_RTX : op0,
                      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
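
/* Example of the cases above (a sketch, not compiler code): for

       int i, j;
       j = i++;

   expand_increment is entered with POST == 1.  If the target has an add
   insn whose predicates accept `i' directly, the increment is merely
   queued and the old value of `i' is returned for the store into `j';
   otherwise the fallback at the end copies the old value, adds with
   expand_binop, and stores the sum back into `i'.  */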
/* Expand all function calls contained within EXP, innermost ones first.
   But don't look within expressions that have sequence points.
   For each CALL_EXPR, record the rtx for its value
   in the CALL_EXPR_RTL field.  */

static void
preexpand_calls (exp)
     tree exp;
{
  register int nops, i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));

  if (! do_preexpand_calls)
    return;

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0
          /* Do nothing if the call returns a variable-sized object.  */
          || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          /* Do nothing to built-in functions.  */
          || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
              && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                  == FUNCTION_DECL)
              && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
        return;

      CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case COMPOUND_EXPR:
    case COND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
         the adjust will be done for it (since it makes jumps).
         Do it now, so that if this is inside an argument
         of a function, we don't get the stack adjustment
         after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case BLOCK:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TRY_CATCH_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return;

    default:
      break;
    }

  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
        type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
        if (type == 'e' || type == '<' || type == '1' || type == '2'
            || type == 'r')
          preexpand_calls (TREE_OPERAND (exp, i));
      }
}
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  rtx comparison = 0;
  int i;
  tree type;
  enum machine_mode mode;

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
                                   TREE_OPERAND (exp, 0),
                                   TREE_OPERAND (exp, 1)),
                            NE, NE);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
        int bitsize, bitpos, unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;
        int alignment;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep,
                             &alignment);

        type = type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          register rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();

          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

          start_cleanup_deferral ();
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);

          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          end_cleanup_deferral ();
        }
      break;

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                      fold (build (EQ_EXPR, TREE_TYPE (exp),
                                   fold (build1 (REALPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp0)),
                                   fold (build1 (REALPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp1)))),
                      fold (build (EQ_EXPR, TREE_TYPE (exp),
                                   fold (build1 (IMAGPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp0)),
                                   fold (build1 (IMAGPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp1))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          comparison = compare (exp, EQ, EQ);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                      fold (build (NE_EXPR, TREE_TYPE (exp),
                                   fold (build1 (REALPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp0)),
                                   fold (build1 (REALPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp1)))),
                      fold (build (NE_EXPR, TREE_TYPE (exp),
                                   fold (build1 (IMAGPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp0)),
                                   fold (build1 (IMAGPART_EXPR,
                                                 TREE_TYPE (inner_type),
                                                 exp1))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          comparison = compare (exp, NE, NE);
        break;
      }

    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        comparison = compare (exp, GE, GEU);
      break;

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
        comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
        comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && !can_compare_p (GET_MODE (temp)))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
                                       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                       GET_MODE (temp), NULL_RTX, 0);
      else
        abort ();
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
        emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
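
/* Example (a sketch): for

       if (a && b)
         f ();

   do_jump is called on the TRUTH_ANDIF_EXPR with IF_TRUE_LABEL null and
   the label following the `if' body as IF_FALSE_LABEL.  The first operand
   jumps to the false label when `a' is zero and falls through otherwise;
   only then is `b' tested, so `b' is never evaluated when `a' is zero.  */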
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
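
/* Worked example (assuming a 32-bit target, where DImode is two words):
   a signed test `x > y' on DImode values emits, in order,

       compare high words:  GT  -> true label, then NE -> false label;
       compare low words:   GTU -> true label, then NE -> false label;

   followed by the final unconditional jump to the false label for the
   all-words-equal case.  Only the high-order word is compared signed.  */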
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
                                   NULL_RTX, 0);

      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp == const0_rtx)
        emit_jump (if_true_label);
      else
        do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
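
/* For instance (a sketch, assuming a 32-bit target): a DImode test of X
   against zero is emitted essentially as

       part = X-low OR X-high;
       compare PART against zero once in word_mode and branch;

   one IOR per additional word plus a single compare, instead of a chain
   of per-word comparisons.  */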
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
                        (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx first = get_last_insn (), insn, branch;
      int br_count;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */
      /* ??? Note that we wouldn't have to do any of this nonsense if
         we passed both labels into a combined compare-and-branch.
         Ah well, jump threading does a good job of repairing the damage.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
                        (if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
         case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that FIRST is deleted.  */

      if (first == 0)
        first = get_insns ();
      else if (INSN_DELETED_P (first))
        abort ();
      else
        first = NEXT_INSN (first);

      /* Look for multiple branches in this sequence, as might be generated
         for a multi-word integer comparison.  */

      br_count = 0;
      branch = NULL_RTX;
      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            branch = insn;
            br_count += 1;
          }

      /* If we've got one branch at the end of the sequence,
         we can try to reverse it.  */

      if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
        {
          rtx insn_label;
          insn_label = XEXP (condjump_label (branch), 0);
          JUMP_LABEL (branch) = insn_label;

          if (insn_label != if_false_label)
            abort ();

          if (invert_jump (branch, if_false_label))
            return;
        }

      /* Multiple branches, or reversion failed.  Convert to branches
         around an unconditional jump.  */

      if_true_label = gen_label_rtx ();
      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            rtx insn_label;
            insn_label = XEXP (condjump_label (insn), 0);
            JUMP_LABEL (insn) = insn_label;

            if (insn_label == if_false_label)
              redirect_jump (insn, if_true_label);
          }

      emit_jump (if_false_label);
      emit_label (if_true_label);
    }
}
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
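
/* Example of the single-bit path above (a sketch):

       flag = (x & 0x10) != 0;

   needs no scc insn; it is expanded as

       flag = (x >> 4) & 1;

   and for the EQ variant the result is additionally XORed with 1.  */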
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */
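
/* Usage sketch (not compiler code): for

       switch (i) { case 3: ... case 4: ... case 5: ... }

   the caller subtracts the low bound 3 from `i' before calling
   do_tablejump with RANGE == 2, so the single GTU test above rejects
   both i < 3 and i > 5 (small values wrap around under the unsigned
   comparison) before the table is indexed.  */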