/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
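/* Illustrative note (not in the original sources): CEIL rounds an
   integer division upward, so CEIL (8, 4) is 2 but CEIL (9, 4) is 3.
   It is used below to turn byte counts into word counts, e.g.
   CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD).  */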
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};
/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
void bc_expand_increment PROTO((struct increment_operator *, tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx,
					 rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
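/* Worked example (illustrative, not from the original sources): with
   MOVE_RATIO of 15, a 32-byte fully-aligned constant-size copy on a
   32-bit target costs 8 SImode moves, so emit_block_move below picks
   move_by_pieces; a 64-byte copy costs 16 moves and falls back to a
   movstr pattern or a library call.  */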
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode]
      = mode_to_load_map[mode]
      = mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
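/* Illustrative sketch (the opcode names below are invented, not taken
   from modemap.def): each line of modemap.def expands DEF_MODEMAP once
   per machine mode, e.g.

     DEF_MODEMAP (SImode, cmpSI, cmpuSI, constSI, loadSI, storeSI)

   so the single #include above fills all three bytecode tables in one
   pass over that file.  */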
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;
/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
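/* Usage sketch (illustrative; this caller is hypothetical, not part of
   the original file): a post-increment of a register X used as an
   operand could be queued as

     rtx q = enqueue_insn (x, gen_add2_insn (x, const1_rtx));

   after which Q stands for the pre-increment value until emit_queue
   flushes the queued add.  */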
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
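/* Illustrative usage (not part of the original file): widening a
   QImode value into a fresh SImode pseudo with zero-extension:

     rtx wide = gen_reg_rtx (SImode);
     convert_move (wide, narrow, 1);

   where `narrow' is a hypothetical QImode rtx; passing 0 instead of 1
   would sign-extend.  */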
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
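/* Worked example (illustrative): extending the 8-bit pattern 0xFF from
   QImode.  After `val &= 0xFF' we have 255; if UNSIGNEDP is zero, bit 7
   is set, so the `val |= -1 << 8' step yields -1, i.e. the byte re-read
   as signed.  If UNSIGNEDP is nonzero the result stays 255.  */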
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
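/* Worked example (illustrative): with MOVE_MAX of 4 and 4-byte-aligned
   operands, L == 11 costs 11/4 = 2 SImode moves (leaving 3 bytes), one
   HImode move (leaving 1), and one QImode move, so the function
   returns 4.  */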
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx (MEM, mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
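/* Illustrative note (not from the original file): when
   data->explicit_inc_to is -1, each iteration first emits an explicit
   decrement, e.g. for SImode

     (set (reg A) (plus (reg A) (const_int -4)))

   and then stores through (mem:SI (reg A)), so the block is walked
   from its high end downward.  */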
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx (REG, mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  target = x;
	  if (GET_MODE (target) != GET_MODE (source_reg))
	    target = gen_lowpart (GET_MODE (source_reg), target);
	}
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}
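/* Illustrative note (the register numbers are invented): such a
   PARALLEL pairs each register with its byte offset in the block,
   e.g.

     (parallel [(expr_list (reg:SI 3) (const_int 0))
		(expr_list (reg:DF 34) (const_int 8))])

   and a null first entry means part of the value also lives on the
   stack.  */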
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (regs, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (regs, 0); i++)
    use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
}
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.  */

void
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
	clear_by_pieces (object, INTVAL (size), align);
      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= GET_MODE_MASK (mode)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && (insn_operand_predicate[(int) code][0] == 0
		      || (*insn_operand_predicate[(int) code][0]) (object,
								   BLKmode))
		  && (insn_operand_predicate[(int) code][2] == 0
		      || (*insn_operand_predicate[(int) code][2]) (opalign,
								   VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  if (insn_operand_predicate[(int) code][1] != 0
		      && ! (*insn_operand_predicate[(int) code][1]) (op1,
								     mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memset_libfunc, 0,
			     VOIDmode, 3,
			     XEXP (object, 0), Pmode,
			     const0_rtx, TYPE_MODE (integer_type_node),
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2,
			     XEXP (object, 0), Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    emit_move_insn (object, const0_rtx);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
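/* Illustrative usage (not part of the original file): copying one
   SImode pseudo into another is simply

     last = emit_move_insn (dest, src);

   with hypothetical SImode rtx's DEST and SRC; the function validates
   any memory addresses involved and then defers to emit_move_insn_1.  */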
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }
  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      /* Show the output dies here.  */
      if (x != y)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      return last_insn;
    }
  else
    abort ();
}
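/* A worked example of the multi-word case above, assuming a 32-bit
   target (UNITS_PER_WORD == 4) with no DImode move pattern: an 8-byte
   DImode move is decomposed into (8 + 3) / 4 == 2 word moves, i.e.
   operand_subword picks out word 0 and word 1 of both operands and
   each pair is moved with a recursive emit_move_insn.  The expression
   (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD is
   a ceiling division, the same computation as the CEIL macro.  */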
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
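/* Illustrative sketch only: on a STACK_GROWS_DOWNWARD target,
   gen_push_operand () yields (pre_dec (reg sp)), so wrapping it in a
   MEM gives the canonical push destination:

	rtx dst = gen_rtx (MEM, SImode, gen_push_operand ());
	emit_move_insn (dst, x);

   This hypothetical fragment pushes the SImode value X; it is the same
   idiom the BLKmode code below builds with gen_rtx (MEM, BLKmode, ...).  */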
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr,
							   args_so_far),
						  skip));

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	  if (HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	    {
	      rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrhi
	  if (HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	    {
	      rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrsi
	  if (HAVE_movstrsi)
	    {
	      rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrdi
	  if (HAVE_movstrdi)
	    {
	      rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	if (GET_CODE (args_so_far) == CONST_INT)
	  addr
	    = memory_address (mode,
			      plus_constant (args_addr,
					     INTVAL (args_so_far)));
	else
	  addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
						args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x);
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
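/* A worked example of the PARTIAL bookkeeping above: with
   UNITS_PER_WORD == 4, PARM_BOUNDARY == 32 and partial == 3, the BLKmode
   case computes used = 12 bytes handled by registers and
   offset = 12 % (32 / 8) == 0, so copying starts 12 bytes into XINNER,
   and the final move_block_to_reg (REGNO (reg), x, 3, mode) loads those
   three words.  The numbers are hypothetical; the formulas are the ones
   used above.  */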
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  if (output_bytecode)
    {
      tree dest_innermost;

      bc_expand_expr (from);
      bc_emit_instruction (duplicate);

      dest_innermost = bc_expand_address (to);

      /* Can't deduce from TYPE that we're dealing with a bitfield, so
	 take care of it here.  */

      bc_store_memory (TREE_TYPE (to), dest_innermost);
      return NULL;
    }
  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
	  && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
	      || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, &alignment);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();
	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
					    force_reg (ptr_mode, offset_rtx)));
	}
      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    {
	      /* When the offset is zero, to_rtx is the address of the
		 structure we are storing into, and hence may be shared.
		 We must make a new MEM before setting the volatile bit.  */
	      if (offset == 0)
		to_rtx = copy_rtx (to_rtx);

	      MEM_VOLATILE_P (to_rtx) = 1;
	    }
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    alignment,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
	 Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					  TYPE_MODE (TREE_TYPE (from)),
					  result,
					  TREE_UNSIGNED (TREE_TYPE (to)))
	      : NULL_RTX);
    }
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
     a promoted variable where the zero- or sign- extension needs to be done.
     Handling this in the normal way is safe because no computation is done
     before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value);
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from),
			 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
      else
	emit_move_insn (to_rtx, value);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node),
					  size, TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }
  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
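/* Illustrative sketch only: a front end expands the statement "a = b"
   by calling this function with the two DECL trees, and WANT_VALUE says
   whether the statement's value is itself used, as in "c = (a = b)":

	rtx val = expand_assignment (a_decl, b_decl, 1, 0);

   a_decl and b_decl are hypothetical tree nodes; the final argument is
   the now-unused SUGGEST_REG documented above.  */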
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferal ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferal ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferal ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferal ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;
      return want_value ? target : NULL_RTX;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
			  GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
	dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  But don't do this if the type of EXP is a subtype
	 of something else since then the conversion might involve
	 more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp
	      = convert
		(signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
					  TREE_TYPE (exp)),
		 exp);

	  exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
					SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);
	}

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
	 the access now so it gets done only once.  Likewise if
	 it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
	  && (MEM_VOLATILE_P (temp)
	      || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	temp = convert_modes (GET_MODE (SUBREG_REG (target)),
			      TYPE_MODE (TREE_TYPE (exp)), temp,
			      SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));
      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || want_value))
	dont_return_target = 1;
    }
  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */

  if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;
	  rtx addr;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      convert (sizetype,
				       build_int_2 (TREE_STRING_LENGTH (exp),
						    0)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */

	      addr = XEXP (target, 0);
	      addr = convert_modes (ptr_mode, Pmode, addr, 1);

	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
		  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
		}
	      else
		{
		  addr = force_reg (ptr_mode, addr);
		  addr = expand_binop (ptr_mode, add_optab, addr,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (ptr_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
				 GET_MODE (size), 0, 0);
		  label = gen_label_rtx ();
		  emit_jump_insn (gen_blt (label));
		}

	      if (size != const0_rtx)
		{
#ifdef TARGET_MEM_FUNCTIONS
		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
				     addr, ptr_mode,
				     const0_rtx, TYPE_MODE (integer_type_node),
				     convert_to_mode (TYPE_MODE (sizetype),
						      size,
						      TREE_UNSIGNED (sizetype)),
				     TYPE_MODE (sizetype));
#else
		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
				     addr, ptr_mode,
				     convert_to_mode (TYPE_MODE (integer_type_node),
						      size,
						      TREE_UNSIGNED (integer_type_node)),
				     TYPE_MODE (integer_type_node));
#endif
		}

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp);
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
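/* Illustrative sketch only: callers that need the stored value pass
   WANT_VALUE = 1 and use the returned rtx rather than TARGET, since the
   logic above may hand back a pseudo copied from (or instead of) TARGET:

	rtx v = store_expr (exp, target, 1);

   With WANT_VALUE = 0 the return value is deliberately NULL_RTX so that
   a caller that forgets to set WANT_VALUE fails quickly.  */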
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
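/* A worked example of the 3/4 test above: a CONSTRUCTOR with elts == 8
   of which zeros == 6 satisfies 4 * 6 >= 3 * 8 (24 >= 24), so it counts
   as "mostly zeros" and store_constructor will clear the whole object
   first; with zeros == 5 the test is 20 >= 24 and fails.  Comparing
   4 * zeros against 3 * elts avoids any division or rounding.  */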
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos,
			 mode, exp, type, cleared)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (bitpos != 0)
	target = change_address (target, VOIDmode,
				 plus_constant (XEXP (target, 0),
						bitpos / BITS_PER_UNIT));
      store_constructor (exp, target, cleared);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp,
		 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
		 int_size_in_bytes (type));
}
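/* Illustrative sketch only: for a nested aggregate at a byte boundary,
   say bitpos == 32 within an already-cleared outer structure, the
   shortcut above re-addresses TARGET at byte 4 and recurses:

	store_constructor_field (target, 64, 32, BLKmode,
				 inner_ctor, outer_type, 1);

   inner_ctor and outer_type are hypothetical; passing cleared == 1 is
   what lets the recursive store_constructor skip re-clearing.  */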
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.
   CLEARED is true if TARGET is known to have been zero'd.  */

static void
store_constructor (exp, target, cleared)
     tree exp;
     rtx target;
     int cleared;
{
  tree type = TREE_TYPE (exp);

  /* We know our target cannot conflict, since safe_from_p has been called.  */
#if 0
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp, 0);
      emit_move_insn (target, temp);
      return;
    }
#endif
  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if (TREE_CODE (type) == UNION_TYPE
	  || TREE_CODE (type) == QUAL_UNION_TYPE)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  if (! cleared)
	    emit_move_insn (target, const0_rtx);

	  cleared = 1;
	}

      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  */
      else if ((list_length (CONSTRUCTOR_ELTS (exp))
		!= list_length (TYPE_FIELDS (type)))
	       || mostly_zeros_p (exp))
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);

	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos = 0;
	  int unsignedp;
	  tree pos, constant = 0, offset = 0;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (TREE_VALUE (elt)))
	    continue;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	  unsignedp = TREE_UNSIGNED (field);
	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  pos = DECL_FIELD_BITPOS (field);
	  if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos;
	  else if (TREE_CODE (pos) == PLUS_EXPR
		   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
	  else
	    offset = pos;

	  if (constant)
	    bitpos = TREE_INT_CST_LOW (constant);

	  if (offset)
	    {
	      rtx offset_rtx;

	      if (contains_placeholder_p (offset))
		offset = build (WITH_RECORD_EXPR, sizetype,
				offset, exp);

	      offset = size_binop (FLOOR_DIV_EXPR, offset,
				   size_int (BITS_PER_UNIT));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();

	      to_rtx
		= change_address (to_rtx, VOIDmode,
				  gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
					   force_reg (ptr_mode, offset_rtx)));
	    }

	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

	  store_constructor_field (to_rtx, bitsize, bitpos,
				   mode, TREE_VALUE (elt), type, cleared);
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
	  need_to_clear = 0;
	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp);
	       elt != NULL_TREE;
	       elt = TREE_CHAIN (elt))
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;
	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);
		  if (TREE_CODE (lo_index) != INTEGER_CST
		      || TREE_CODE (hi_index) != INTEGER_CST)
		    {
		      need_to_clear = 1;
		      break;
		    }
		  this_node_count = TREE_INT_CST_LOW (hi_index)
		    - TREE_INT_CST_LOW (lo_index) + 1;
		}
	      else
		this_node_count = 1;
	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }
	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (count < maxelt - minelt + 1
	      || 4 * zero_count >= 3 * count)
	    need_to_clear = 1;
	}
      if (need_to_clear)
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  cleared = 1;
	}
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  if (cleared && is_zeros_p (value))
	    continue;

	  mode = TYPE_MODE (elttype);
	  bitsize = GET_MODE_BITSIZE (mode);
	  unsignedp = TREE_UNSIGNED (elttype);

	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	    {
	      tree lo_index = TREE_OPERAND (index, 0);
	      tree hi_index = TREE_OPERAND (index, 1);
	      rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
	      struct nesting *loop;
	      HOST_WIDE_INT lo, hi, count;
	      tree position;
	      /* If the range is constant and "small", unroll the loop.  */
	      if (TREE_CODE (lo_index) == INTEGER_CST
		  && TREE_CODE (hi_index) == INTEGER_CST
		  && (lo = TREE_INT_CST_LOW (lo_index),
		      hi = TREE_INT_CST_LOW (hi_index),
		      count = hi - lo + 1,
		      (GET_CODE (target) != MEM
		       || count <= 2
		       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
			   && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
			   <= 40 * 8))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
		      store_constructor_field (target, bitsize, bitpos,
					       mode, value, type, cleared);
		    }
		}
	      else
		{
		  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  loop_top = gen_label_rtx ();
		  loop_end = gen_label_rtx ();

		  unsignedp = TREE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  DECL_RTL (index) = index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));

		  if (TREE_CODE (value) == SAVE_EXPR
		      && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the
			 loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
		  store_expr (lo_index, index_r, 0);
		  loop = expand_start_loop (0);

		  /* Assign value to element index.  */
		  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
					 size_int (BITS_PER_UNIT));
		  position = size_binop (MULT_EXPR,
					 size_binop (MINUS_EXPR, index,
						     TYPE_MIN_VALUE (domain)),
					 position);
		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
		  xtarget = change_address (target, mode, addr);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, cleared);
		  else
		    store_expr (value, xtarget, 0);

		  expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0, 0);
		  expand_end_loop ();
		  emit_label (loop_end);

		  /* Needed by stupid register allocation, to extend the
		     lifetime of pseudo-regs used by target past the end
		     of the loop.  */
		  emit_insn (gen_rtx (USE, GET_MODE (target), target));
		}
	    }
	  else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
		   || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
	    {
	      rtx pos_rtx, addr;
	      tree position;

	      if (index == 0)
		index = size_int (i);

	      if (minelt)
		index = size_binop (MINUS_EXPR, index,
				    TYPE_MIN_VALUE (domain));
	      position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
				     size_int (BITS_PER_UNIT));
	      position = size_binop (MULT_EXPR, index, position);
	      pos_rtx = expand_expr (position, 0, VOIDmode, 0);
	      addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
	      xtarget = change_address (target, mode, addr);
	      store_expr (value, xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((TREE_INT_CST_LOW (index) - minelt)
			  * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
	      else
		bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));

	      store_constructor_field (target, bitsize, bitpos,
				       mode, value, type, cleared);
	    }
	}
    }
  /* set constructor assignments */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      rtx xtarget = XEXP (target, 0);
      int set_word_size = TYPE_ALIGN (type);
      int nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the whole set (using bzero/memset) and
	 then set the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE)
	{
	  if (! cleared)
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_binop (MINUS_EXPR, domain_max, domain_min),
			      size_one_node);

      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
	abort ();
      nbits = TREE_INT_CST_LOW (bitlength);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
	 are "complicated" (more than one range), initialize (the
	 constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
	  int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = (char *) alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  int bit_pos = 0;
	  int ibit = 0;
	  int offset = 0;	/* In bytes from beginning of set.  */
	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}
	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;
		      /* The assumption here is that it is safe to use
			 XEXP if the set is multi-word, but not if
			 it's single-word.  */
		      if (GET_CODE (target) == MEM)
			{
			  to_rtx = plus_constant (XEXP (target, 0), offset);
			  to_rtx = change_address (target, mode, to_rtx);
			}
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }
		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (! cleared)
	{
	  /* Don't bother clearing storage if the set is all ones.  */
	  if (TREE_CHAIN (elt) != NULL_TREE
	      || (TREE_PURPOSE (elt) == NULL_TREE
		  ? nbits != 1
		  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
		     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
		     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
			 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
			 != nbits))))
	    clear_storage (target, expr_size (exp),
			   TYPE_ALIGN (type) / BITS_PER_UNIT);
	}

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* start of range of element or NULL */
	  tree startbit = TREE_PURPOSE (elt);
	  /* end of range of element, or element value */
	  tree endbit = TREE_VALUE (elt);
	  HOST_WIDE_INT startb, endb;
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* handle non-range tuple element like [ expr ]  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }
	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (REG_P (target))
	    {
	      targetx = assign_stack_temp (GET_MODE (target),
					   GET_MODE_SIZE (GET_MODE (target)),
					   0);
	      emit_move_insn (targetx, target);
	    }
	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

#ifdef TARGET_MEM_FUNCTIONS
	  /* Optimization:  If startbit and endbit are
	     constants divisible by BITS_PER_UNIT,
	     call memset instead.  */
	  if (TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, 0,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
#endif
	    {
	      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
				 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
				 bitlength_rtx, TYPE_MODE (sizetype),
				 startbit_rtx, TYPE_MODE (sizetype),
				 endbit_rtx, TYPE_MODE (sizetype));
	    }

	  if (REG_P (target))
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}
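/* A worked example of the bit-buffer loop above: for a 32-bit set with
   set_word_size == 32 holding members {0, 1, 8}, get_set_constructor_bits
   fills bit_buffer with ones at indexes 0, 1 and 8.  On a little-endian
   target the accumulated word is 1 | 2 | 256 == 0x103 and a single
   emit_move_insn stores it; on a BYTES_BIG_ENDIAN target each member k
   sets bit (set_word_size - 1 - k) instead.  */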
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  HOST_WIDE_INT width_mask = 0;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
				      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      MEM_IN_STRUCT_P (object) = 1;
      MEM_IN_STRUCT_P (blk_object) = 1;
      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
		   align, total_size);

      /* Even though we aren't returning target, we need to
	 give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }
  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode])
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (SLOW_UNALIGNED_ACCESS
	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = change_address (target, VOIDmode,
				   plus_constant (XEXP (target, 0),
						  bitpos / BITS_PER_UNIT));

	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   1);

	  return value_mode == VOIDmode ? const0_rtx : target;
	}
      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.  */
	  /* If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      if (unsignedp)
		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }

	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, 0, align,
				    total_size);
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = copy_rtx (change_address (target, mode,
					 plus_constant (addr,
							(bitpos
							 / BITS_PER_UNIT))));
      MEM_IN_STRUCT_P (to_rtx) = 1;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
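/* Illustrative sketch only: storing EXP into a 5-bit field at bit
   offset 3 of a 4-byte, byte-aligned structure in memory takes the
   bit-field branch above and ends in store_bit_field:

	store_field (target, 5, 3, VOIDmode, exp, VOIDmode, 0, 1, 4);

   The argument values are hypothetical; MODE == VOIDmode is the
   bit-field marker described in the comment before the function.  */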
/* Return true if any object containing the innermost array is an unaligned
   packed structure field.  */

static int
get_inner_unaligned_p (exp)
     tree exp;
{
  int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
	      < needed_alignment)
	    return 1;
	}
      else if (TREE_CODE (exp) != ARRAY_REF
	       && TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      exp = TREE_OPERAND (exp, 0);
    }

  return 0;
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.
   We set *PALIGNMENT to the alignment in bytes of the address that will be
   computed.  This is the alignment of the thing we return if *POFFSET
   is zero, but can be less strictly aligned if *POFFSET is nonzero.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep, palignment)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
     int *palignment;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;
  int alignment = BIGGEST_ALIGNMENT;
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }
  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));
	  tree constant = integer_zero_node, var = pos;

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  /* Assume here that the offset is a multiple of a unit.
	     If not, there should be an explicitly added constant.  */
	  if (TREE_CODE (pos) == PLUS_EXPR
	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos, var = integer_zero_node;

	  *pbitpos += TREE_INT_CST_LOW (constant);
	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (EXACT_DIV_EXPR, var,
					   size_int (BITS_PER_UNIT)));
	}
4224 else if (TREE_CODE (exp
) == ARRAY_REF
)
4226 /* This code is based on the code in case ARRAY_REF in expand_expr
4227 below. We assume here that the size of an array element is
4228 always an integral multiple of BITS_PER_UNIT. */
4230 tree index
= TREE_OPERAND (exp
, 1);
4231 tree domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4233 = domain
? TYPE_MIN_VALUE (domain
) : integer_zero_node
;
4234 tree index_type
= TREE_TYPE (index
);
4236 if (! integer_zerop (low_bound
))
4237 index
= fold (build (MINUS_EXPR
, index_type
, index
, low_bound
));
4239 if (TYPE_PRECISION (index_type
) != TYPE_PRECISION (sizetype
))
4241 index
= convert (type_for_size (TYPE_PRECISION (sizetype
), 0),
4243 index_type
= TREE_TYPE (index
);
4246 index
= fold (build (MULT_EXPR
, index_type
, index
,
4247 convert (index_type
,
4248 TYPE_SIZE (TREE_TYPE (exp
)))));
4250 if (TREE_CODE (index
) == INTEGER_CST
4251 && TREE_INT_CST_HIGH (index
) == 0)
4252 *pbitpos
+= TREE_INT_CST_LOW (index
);
4254 offset
= size_binop (PLUS_EXPR
, offset
,
4255 size_binop (FLOOR_DIV_EXPR
, index
,
4256 size_int (BITS_PER_UNIT
)));
4258 else if (TREE_CODE (exp
) != NON_LVALUE_EXPR
4259 && ! ((TREE_CODE (exp
) == NOP_EXPR
4260 || TREE_CODE (exp
) == CONVERT_EXPR
)
4261 && ! (TREE_CODE (TREE_TYPE (exp
)) == UNION_TYPE
4262 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4264 && (TYPE_MODE (TREE_TYPE (exp
))
4265 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))))))
4268 /* If any reference in the chain is volatile, the effect is volatile. */
4269 if (TREE_THIS_VOLATILE (exp
))
4272 /* If the offset is non-constant already, then we can't assume any
4273 alignment more than the alignment here. */
4274 if (! integer_zerop (offset
))
4275 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
4277 exp
= TREE_OPERAND (exp
, 0);
4280 if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd')
4281 alignment
= MIN (alignment
, DECL_ALIGN (exp
));
4282 else if (TREE_TYPE (exp
) != 0)
4283 alignment
= MIN (alignment
, TYPE_ALIGN (TREE_TYPE (exp
)));
4285 if (integer_zerop (offset
))
4288 if (offset
!= 0 && contains_placeholder_p (offset
))
4289 offset
= build (WITH_RECORD_EXPR
, sizetype
, offset
, orig_exp
);
4293 *palignment
= alignment
/ BITS_PER_UNIT
;
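/* Illustrative sketch, not part of the original source: a typical
   caller decomposes a reference with get_inner_reference and then
   addresses the containing object plus the constant bit position.
   The helper name below is hypothetical.

       static rtx
       example_field_address (exp)
            tree exp;
       {
         int bitsize, bitpos, unsignedp, volatilep, alignment;
         tree offset;
         enum machine_mode mode1;
         tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                           &mode1, &unsignedp, &volatilep,
                                           &alignment);
         rtx base = expand_expr (inner, NULL_RTX, VOIDmode, 0);

         return plus_constant (XEXP (base, 0), bitpos / BITS_PER_UNIT);
       }

   This assumes OFFSET came back zero; a nonzero OFFSET must be added
   in separately, as the COMPONENT_REF case of expand_expr below does.  */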
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
                          force_operand (op2, NULL_RTX),
                          target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
        {
          binoptab = add_optab;
          op2 = negate_rtx (GET_MODE (value), op2);
        }

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
          && GET_CODE (XEXP (value, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
        {
          rtx temp = expand_binop (GET_MODE (value), binoptab,
                                   XEXP (XEXP (value, 0), 0), op2,
                                   subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_binop (GET_MODE (value), binoptab, temp,
                               force_operand (XEXP (XEXP (value, 0), 1), 0),
                               target, 0, OPTAB_LIB_WIDEN);
        }

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
                           force_operand (op2, NULL_RTX),
                           target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
         because the only operations we are expanding here are signed ones.  */
    }

  return value;
}
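/* Illustrative sketch, not part of the original source: force_operand
   is how the PLUS/MULT nests produced under EXPAND_SUM are reduced to
   a single register when they cannot be used directly:

       rtx sum = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_SUM);
       if (! CONSTANT_P (sum) && GET_CODE (sum) != REG
           && GET_CODE (sum) != MEM)
         sum = force_operand (sum, NULL_RTX);  */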
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
        rtx target = assign_temp (part_type, 0, 1, 1);
        if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
          target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
        parts = tree_cons (to_be_saved,
                           build (RTL_EXPR, part_type, NULL_TREE,
                                  (tree) target),
                           parts);
        store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}
/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
        parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}
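/* Illustrative sketch, not part of the original source: the two
   routines above are used as a pair by the assignment-expansion code
   when a type has TYPE_NONCOPIED_PARTS.  Roughly:

       parts = save_noncopied_parts (lhs, TYPE_NONCOPIED_PARTS (type));
       ...do the bulk copy of RHS into LHS...
       then, for each (TREE_PURPOSE, TREE_VALUE) pair in PARTS, store
       the saved RTL_EXPR value back into its COMPONENT_REF in LHS.

   The exact consumer is elsewhere in this file; this only shows the
   shape of the returned TREE_LIST.  */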
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  */

static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
         have no way of allocating temporaries of variable size
         (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
         So we assume here that something at a higher level has prevented a
         clash.  This is somewhat bogus, but the best we can do.  Only
         do this when X is BLKmode.  */
      || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
                 != INTEGER_CST)
          && GET_MODE (x) == BLKmode))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
        return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
          || (GET_CODE (XEXP (x, 0)) == PLUS
              && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
        return ((TREE_VALUE (exp) == 0
                 || safe_from_p (x, TREE_VALUE (exp)))
                && (TREE_CHAIN (exp) == 0
                    || safe_from_p (x, TREE_CHAIN (exp))));
      else
        return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
              && safe_from_p (x, TREE_OPERAND (exp, 1)));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          return (staticp (TREE_OPERAND (exp, 0))
                  || safe_from_p (x, TREE_OPERAND (exp, 0)));

        case INDIRECT_REF:
          if (GET_CODE (x) == MEM)
            return 0;
          break;

        case CALL_EXPR:
          exp_rtl = CALL_EXPR_RTL (exp);
          if (exp_rtl == 0)
            {
              /* Assume that the call will clobber all hard registers and
                 all of memory.  */
              if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
                  || GET_CODE (x) == MEM)
                return 0;
            }
          break;

        case RTL_EXPR:
          /* If a sequence exists, we would have to scan every instruction
             in the sequence to see if it was safe.  This is probably not
             worthwhile.  */
          if (RTL_EXPR_SEQUENCE (exp))
            return 0;

          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case WITH_CLEANUP_EXPR:
          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case CLEANUP_POINT_EXPR:
          return safe_from_p (x, TREE_OPERAND (exp, 0));

        case SAVE_EXPR:
          exp_rtl = SAVE_EXPR_RTL (exp);
          break;

        case BIND_EXPR:
          /* The only operand we look at is operand 1.  The rest aren't
             part of the expression.  */
          return safe_from_p (x, TREE_OPERAND (exp, 1));

        case METHOD_CALL_EXPR:
          /* This takes a rtx argument, but shouldn't appear here.  */
          abort ();
        }

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i)))
          return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (GET_CODE (exp_rtl) == REG
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
                    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
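/* Illustrative sketch, not part of the original source: because TARGET
   and TMODE are only suggestions, the canonical calling pattern is

       temp = expand_expr (exp, target, mode, 0);
       if (temp != target)
         emit_move_insn (target, temp);

   i.e. callers must always be prepared for the value to come back
   somewhere other than where they asked for it.  */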
rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  rtx original_target = target;
  tree context;
  /* Maybe defer this until sure not doing bytecode?  */
  int ignore = (target == const0_rtx
                || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                     || code == CONVERT_EXPR || code == REFERENCE_EXPR
                     || code == COND_EXPR)
                    && TREE_CODE (type) == VOID_TYPE));

  if (output_bytecode && modifier != EXPAND_INITIALIZER)
    {
      bc_expand_expr (exp);
      return NULL;
    }

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (GET_CODE (temp) == MEM)
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == '1')
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, modifier);
      else if (TREE_CODE_CLASS (code) == '2'
               || TREE_CODE_CLASS (code) == '<')
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
               && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        /* If the second operand has no side effects, just evaluate
           the first.  */
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, modifier);

      target = 0;
    }
  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);
        /* Handle using a label in a containing function.  */
        if (function != current_function_decl && function != 0)
          {
            struct function *p = find_function_data (function);
            /* Allocate in the memory associated with the function
               that the label is in.  */
            push_obstacks (p->function_obstack,
                           p->function_maybepermanent_obstack);

            p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
                                        label_rtx (exp), p->forced_labels);
            pop_obstacks ();
          }
        else if (modifier == EXPAND_INITIALIZER)
          forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
                                   label_rtx (exp), forced_labels);
        temp = gen_rtx (MEM, FUNCTION_MODE,
                        gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
        if (function != current_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          push_obstacks_nochange ();
          end_temporary_allocation ();
          layout_decl (exp, 0);
          PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
          pop_obstacks ();
        }

      /* ... fall through ...  */
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();

      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          if (DECL_NO_STATIC_CHAIN (current_function_decl))
            abort ();
          mark_addressable (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr = gen_rtx (MEM, Pmode,
                            fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);
          temp = change_address (DECL_RTL (exp), mode, addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
                               XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
        temp = change_address (DECL_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
            mark_reg_pointer (XEXP (temp, 0),
                              DECL_ALIGN (exp) / BITS_PER_UNIT);

          return temp;
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != mode)
        {
          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          if (GET_MODE (DECL_RTL (exp))
              != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
            abort ();

          temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return DECL_RTL (exp);
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp),
                                 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
              || (flag_force_addr
                  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
        return change_address (TREE_CST_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
          temp = SAVE_EXPR_RTL (exp);
          if (temp && GET_CODE (temp) == REG)
            {
              put_var_into_stack (exp);
              temp = SAVE_EXPR_RTL (exp);
            }
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return change_address (temp, mode,
                                 fix_lexical_addr (XEXP (temp, 0), exp));
        }
      if (SAVE_EXPR_RTL (exp) == 0)
        {
          if (mode == VOIDmode)
            temp = const0_rtx;
          else
            temp = assign_temp (type, 0, 0, 0);

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)
            save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
                                      save_expr_regs);

          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  Note that `unsignedp' was modified above in
             this case.  */

          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
            {
              temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
            }

          if (temp == const0_rtx)
            expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
          else
            store_expr (TREE_OPERAND (exp, 0), temp, 0);
        }

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
        {
          /* Compute the signedness and make the proper SUBREG.  */
          promote_mode (type, mode, &unsignedp, 0);
          temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
      TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
      return temp;

    case PLACEHOLDER_EXPR:
      /* If there is an object on the head of the placeholder list,
         see if some object in its references is of type TYPE.  For
         further information, see tree.def.  */
      if (placeholder_list)
        {
          tree need_type = TYPE_MAIN_VARIANT (type);
          tree object = 0;
          tree old_list = placeholder_list;
          tree elt;

          /* See if the object is the type that we want.  Then see if
             the operand of any reference is the type we want.  */
          if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_PURPOSE (placeholder_list)))
               == need_type))
            object = TREE_PURPOSE (placeholder_list);

          /* Find the innermost reference that is of the type we want.  */
          for (elt = TREE_PURPOSE (placeholder_list);
               elt != 0
               && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                   || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                   || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                   || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
               elt = ((TREE_CODE (elt) == COMPOUND_EXPR
                       || TREE_CODE (elt) == COND_EXPR)
                      ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
            if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
                    == need_type))
              object = TREE_OPERAND (elt, 0);

          if (object != 0)
            {
              /* Expand this object skipping the list entries before
                 it was found in case it is also a PLACEHOLDER_EXPR.
                 In that case, we want to translate it using subsequent
                 entries.  */
              placeholder_list = TREE_CHAIN (placeholder_list);
              temp = expand_expr (object, original_target, tmode, modifier);
              placeholder_list = old_list;
              return temp;
            }
        }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();
    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
                            tmode, modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
                                 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
        tree vars = TREE_OPERAND (exp, 0);
        int vars_need_expansion = 0;

        /* Need to open a binding contour here because
           if there are any cleanups they must be contained here.  */
        expand_start_bindings (0);

        /* Mark the corresponding BLOCK for output in its proper place.  */
        if (TREE_OPERAND (exp, 2) != 0
            && ! TREE_USED (TREE_OPERAND (exp, 2)))
          insert_block (TREE_OPERAND (exp, 2));

        /* If VARS have not yet been expanded, expand them now.  */
        while (vars)
          {
            if (DECL_RTL (vars) == 0)
              {
                vars_need_expansion = 1;
                expand_decl (vars);
              }
            expand_decl_init (vars);
            vars = TREE_CHAIN (vars);
          }

        temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);

        expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

        return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
        {
          if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
            abort ();
          emit_insns (RTL_EXPR_SEQUENCE (exp));
          RTL_EXPR_SEQUENCE (exp) = const0_rtx;
        }
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;
          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
          return const0_rtx;
        }

      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  Likewise, if we have a target we can use, it is best to
         store directly into the target unless the type is large enough
         that memcpy will be used.  If we are making an initializer and
         all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
                && ((mode == BLKmode
                     && ! (target != 0 && safe_from_p (target, exp)))
                    || TREE_ADDRESSABLE (exp)
                    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
                        && (move_by_pieces_ninsns
                            (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
                             TYPE_ALIGN (type) / BITS_PER_UNIT)
                            > MOVE_RATIO)
                        && ! mostly_zeros_p (exp))))
               || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
        {
          rtx constructor = output_constant_def (exp);
          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM
              && (! memory_address_p (GET_MODE (constructor),
                                      XEXP (constructor, 0))
                  || (flag_force_addr
                      && GET_CODE (XEXP (constructor, 0)) != REG)))
            constructor = change_address (constructor, VOIDmode,
                                          XEXP (constructor, 0));
          return constructor;
        }

      else
        {
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (target == 0 || ! safe_from_p (target, exp)
              || GET_CODE (target) == PARALLEL)
            {
              if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
                target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
              else
                target = assign_temp (type, 0, 1, 1);
            }

          if (TREE_READONLY (exp))
            {
              if (GET_CODE (target) == MEM)
                target = copy_rtx (target);

              RTX_UNCHANGING_P (target) = 1;
            }

          store_constructor (exp, target, 0);
          return target;
        }
    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);
        tree exp2;

        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address (mode, op0);

        temp = gen_rtx (MEM, mode, op0);
        /* If address was computed by addition,
           mark this as an element of an aggregate.  */
        if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
            || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
                && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
            || AGGREGATE_TYPE_P (TREE_TYPE (exp))
            || (TREE_CODE (exp1) == ADDR_EXPR
                && (exp2 = TREE_OPERAND (exp1, 0))
                && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
          MEM_IN_STRUCT_P (temp) = 1;

        MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;

        /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
           here, because, in C and C++, the fact that a location is accessed
           through a pointer to const does not mean that the value there can
           never change.  Languages where it can never change should
           also set TREE_STATIC.  */
        RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
        return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
        abort ();

      {
        tree array = TREE_OPERAND (exp, 0);
        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
        tree index = TREE_OPERAND (exp, 1);
        tree index_type = TREE_TYPE (index);
        int i;

        if (TREE_CODE (low_bound) != INTEGER_CST
            && contains_placeholder_p (low_bound))
          low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);

        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion, (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

           But sizetype isn't quite right either (especially if
           the lowbound is negative).  FIXME */

        if (! integer_zerop (low_bound))
          index = fold (build (MINUS_EXPR, index_type, index,
                               convert (sizetype, low_bound)));

        if ((TREE_CODE (index) != INTEGER_CST
             || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
            && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
          {
            /* Nonconstant array index or nonconstant element size, and
               not an array in an unaligned (packed) structure field.
               Generate the tree for *(&array+index) and expand that,
               except do it in a language-independent way
               and don't complain about non-lvalue arrays.
               `mark_addressable' should already have been called
               for any array for which this case will be reached.  */

            /* Don't forget the const or volatile flag from the array
               element.  */
            tree variant_type = build_type_variant (type,
                                                    TREE_READONLY (exp),
                                                    TREE_THIS_VOLATILE (exp));
            tree array_adr = build1 (ADDR_EXPR,
                                     build_pointer_type (variant_type), array);
            tree elt;
            tree size = size_in_bytes (type);

            /* Convert the integer argument to a type the same size as sizetype
               so the multiply won't overflow spuriously.  */
            if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
              index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
                               index);

            if (TREE_CODE (size) != INTEGER_CST
                && contains_placeholder_p (size))
              size = build (WITH_RECORD_EXPR, sizetype, size, exp);

            /* Don't think the address has side effects
               just because the array does.
               (In some cases the address might have side effects,
               and we fail to record that fact here.  However, it should not
               matter, since expand_expr should not care.)  */
            TREE_SIDE_EFFECTS (array_adr) = 0;

            elt
              = build1
                (INDIRECT_REF, type,
                 fold (build (PLUS_EXPR,
                              TYPE_POINTER_TO (variant_type),
                              array_adr,
                              convert
                              (TYPE_POINTER_TO (variant_type),
                               fold (build (MULT_EXPR, TREE_TYPE (index),
                                            index,
                                            convert (TREE_TYPE (index),
                                                     size)))))));

            /* Volatility, etc., of new expression is same as old
               expression.  */
            TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
            TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
            TREE_READONLY (elt) = TREE_READONLY (exp);

            return expand_expr (elt, target, tmode, modifier);
          }

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (TREE_CODE (array) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && !TREE_INT_CST_HIGH (index)
            && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1)
          return GEN_INT (TREE_STRING_POINTER (array)[i]);

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
          {
            if (TREE_CODE (index) == INTEGER_CST
                && TREE_INT_CST_HIGH (index) == 0)
              {
                tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

                i = TREE_INT_CST_LOW (index);
                while (elem && i--)
                  elem = TREE_CHAIN (elem);
                if (elem)
                  return expand_expr (fold (TREE_VALUE (elem)), target,
                                      tmode, modifier);
              }
          }

        else if (optimize >= 1
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
          {
            if (TREE_CODE (index) == INTEGER_CST
                && TREE_INT_CST_HIGH (index) == 0)
              {
                tree init = DECL_INITIAL (array);

                i = TREE_INT_CST_LOW (index);
                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem = CONSTRUCTOR_ELTS (init);

                    while (elem
                           && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
                      elem = TREE_CHAIN (elem);
                    if (elem)
                      return expand_expr (fold (TREE_VALUE (elem)), target,
                                          tmode, modifier);
                  }
                else if (TREE_CODE (init) == STRING_CST
                         && i < TREE_STRING_LENGTH (init))
                  return GEN_INT (TREE_STRING_POINTER (init)[i]);
              }
          }
      }

      /* Treat array-ref with constant index as a component-ref.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  Don't do this if we have
         already written the data since we want to refer to that copy
         and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
          && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
          && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
                    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
                         == MODE_INT)
                        && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
                if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
                  {
                    int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
                    enum machine_mode imode
                      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

                    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
                      {
                        op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
                        op0 = expand_and (op0, op1, target);
                      }
                    else
                      {
                        tree count
                          = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
                                         0);

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }

                return op0;
              }
        }
      {
        enum machine_mode mode1;
        int bitsize;
        int bitpos;
        tree offset;
        int volatilep = 0;
        int alignment;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep,
                                        &alignment);

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        if (tem == exp)
          abort ();

        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to have to do.  This occurs in unchecked conversion in Ada.  */

        op0 = expand_expr (tem,
                           (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                            && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                                != INTEGER_CST)
                            ? target : NULL_RTX),
                           VOIDmode,
                           modifier == EXPAND_INITIALIZER ? modifier : 0);

        /* If this is a constant, put it into a register if it is a
           legitimate constant and memory if it isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }

        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

            if (GET_CODE (op0) != MEM)
              abort ();
            op0 = change_address (op0, VOIDmode,
                                  gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
                                           force_reg (ptr_mode, offset_rtx)));
          }

        /* Don't forget about volatility even if this is a bitfield.  */
        if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
          {
            op0 = copy_rtx (op0);
            MEM_VOLATILE_P (op0) = 1;
          }

        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.
           If we ultimately want the address (EXPAND_CONST_ADDRESS or
           EXPAND_INITIALIZER), then we must not copy to a temporary.  */
        if (mode1 == VOIDmode
            || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
            || (modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER
                && ((mode1 != BLKmode && ! direct_load[(int) mode1]
                     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
                    /* If the field isn't aligned enough to fetch as a memref,
                       fetch it as a bit field.  */
                    || (SLOW_UNALIGNED_ACCESS
                        && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
                            || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode)
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                if (GET_CODE (op0) != MEM
                    || (target != 0 && GET_CODE (target) != MEM)
                    || bitpos % BITS_PER_UNIT != 0)
                  abort ();

                op0 = change_address (op0, VOIDmode,
                                      plus_constant (XEXP (op0, 0),
                                                     bitpos / BITS_PER_UNIT));
                if (target == 0)
                  target = assign_temp (type, 0, 1, 1);

                emit_block_move (target, op0,
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                          / BITS_PER_UNIT),
                                 1);

                return target;
              }

            op0 = validize_mem (op0);

            if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
              mark_reg_pointer (XEXP (op0, 0), alignment);

            op0 = extract_bit_field (op0, bitsize, bitpos,
                                     unsignedp, target, ext_mode, ext_mode,
                                     alignment,
                                     int_size_in_bytes (TREE_TYPE (tem)));

            /* If the result is a record type and BITSIZE is narrower than
               the mode of OP0, an integral mode, and this is a big endian
               machine, we must put the field into the high-order bits.  */
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
                                            - bitsize),
                                  op0, 1);

            if (mode == BLKmode)
              {
                rtx new = assign_stack_temp (ext_mode,
                                             bitsize / BITS_PER_UNIT, 0);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
                MEM_IN_STRUCT_P (op0) = 1;
              }

            return op0;
          }

        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
                                                    (bitpos / BITS_PER_UNIT)));
        else
          op0 = change_address (op0, mode1,
                                plus_constant (XEXP (op0, 0),
                                               (bitpos / BITS_PER_UNIT)));
        if (GET_CODE (XEXP (op0, 0)) == REG)
          mark_reg_pointer (XEXP (op0, 0), alignment);

        MEM_IN_STRUCT_P (op0) = 1;
        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;
        else if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }
      /* Intended for a reference to a buffer of a file-object in Pascal.
         But it's not certain that a special tree code will really be
         necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
        /* Pascal set IN expression.

           Algorithm:
               rlo       = set_low - (set_low%bits_per_word);
               the_word  = set [ (index - rlo)/bits_per_word ];
               bit_index = index % bits_per_word;
               bitmask   = 1 << bit_index;
               return !!(the_word & bitmask);  */
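        /* Worked example (illustrative, not in the original source):
           with bits_per_word == 8, set_low == 3 and index == 13,

               rlo       = 3 - (3 % 8)       = 0
               the_word  = set[(13 - 0) / 8] = set[1]
               bit_index = 13 % 8            = 5
               bitmask   = 1 << 5            = 0x20

           so the result is nonzero iff bit 5 of the second byte of the
           set is on.  */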
        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
        tree set_type = TREE_TYPE (set);
        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
        rtx index_val = expand_expr (index, 0, VOIDmode, 0);
        rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        rtx setval = expand_expr (set, 0, VOIDmode, 0);
        rtx setaddr = XEXP (setval, 0);
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;

        preexpand_calls (exp);

        /* If domain is empty, answer is no.  Likewise if index is constant
           and out of bounds.  */
        if ((TREE_CODE (set_high_bound) == INTEGER_CST
             && TREE_CODE (set_low_bound) == INTEGER_CST
             && tree_int_cst_lt (set_high_bound, set_low_bound)
             || (TREE_CODE (index) == INTEGER_CST
                 && TREE_CODE (set_low_bound) == INTEGER_CST
                 && tree_int_cst_lt (index, set_low_bound))
             || (TREE_CODE (set_high_bound) == INTEGER_CST
                 && TREE_CODE (index) == INTEGER_CST
                 && tree_int_cst_lt (set_high_bound, index))))
          return const0_rtx;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        /* If we get here, we have to generate the code for both cases
           (in range and out of range).  */

        op0 = gen_label_rtx ();
        op1 = gen_label_rtx ();

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (lo_r) == CONST_INT))
          {
            emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
                           GET_MODE (index_val), iunsignedp, 0);
            emit_jump_insn (gen_blt (op1));
          }

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (hi_r) == CONST_INT))
          {
            emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
                           GET_MODE (index_val), iunsignedp, 0);
            emit_jump_insn (gen_bgt (op1));
          }

        /* Calculate the element number of bit zero in the first word
           of the set.  */
        if (GET_CODE (lo_r) == CONST_INT)
          rlow = GEN_INT (INTVAL (lo_r)
                          & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
        else
          rlow = expand_binop (index_mode, and_optab, lo_r,
                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
                               NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        diff = expand_binop (index_mode, sub_optab, index_val, rlow,
                             NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab, diff,
                                             setaddr, NULL_RTX, iunsignedp,
                                             OPTAB_LIB_WIDEN));

        /* Extract the bit we want to examine */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx (MEM, byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }
    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
        {
          RTL_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
          expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));

          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 2) = 0;
        }
      return RTL_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
        extern int temp_slot_level;
        /* Start a new binding layer that will keep track of all cleanup
           actions to be performed.  */
        expand_start_bindings (0);

        target_temp_slot_level = temp_slot_level;

        op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
        /* If we're going to use this value, load it up now.  */
        if (! ignore)
          op0 = force_not_mem (op0);
        preserve_temp_slots (op0);
        expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        return expand_builtin (exp, target, subtarget, tmode, ignore);

      /* If this call was expanded already by preexpand_calls,
         just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
        return CALL_EXPR_RTL (exp);

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
          if (target == 0)
            {
              if (mode != BLKmode)
                target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
              else
                target = assign_temp (type, 0, 1, 1);
            }

          if (GET_CODE (target) == MEM)
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        change_address (target, TYPE_MODE (valtype), 0), 0);

          else if (GET_CODE (target) == REG)
            /* Store this field into a union of the proper type.  */
            store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
                         TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         VOIDmode, 0, 1,
                         int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return op0;
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
        return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
        return
          convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                         op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
        return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
        return
          convert_to_mode (mode, op0,
                           TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0,
                      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
         constant.  */
    plus_expr:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }

      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
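      /* Illustrative sketch, not in the original source: for
         P = &ARR[5] with 4-byte elements the address tree is
         (+ &ARR 20), and under EXPAND_SUM the constant is folded into
         the symbol reference:

             op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
             op0 = plus_constant (op0, 20);

         yielding (const (plus (symbol_ref ARR) 20)) rather than an
         explicit add insn.  */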
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || mode == ptr_mode)
        {
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 EXPAND_SUM);
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Don't go to both_summands if modifier
                     says it's not right to return a PLUS.  */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
              op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
          && CONSTANT_P (XEXP (op0, 1)))
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }
      /* If adding to a sum including a constant,
         associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
          && CONSTANT_P (XEXP (op1, 1)))
        {
          rtx constant_term = const0_rtx;

          temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
          if (temp != 0)
            op0 = temp;
          /* Ensure that MULT comes first if there is one.  */
          else if (GET_CODE (op0) == MULT)
            op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
          else
            op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);

          /* Let's also eliminate constants from op0 if possible.  */
          op0 = eliminate_constant_term (op0, &constant_term);

          /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
             their sum should be a constant.  Form it into OP1, since the
             result we want will then be OP0 + OP1.  */

          temp = simplify_binary_operation (PLUS, mode, constant_term,
                                            XEXP (op1, 1));
          if (temp != 0)
            op1 = temp;
          else
            op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
        }

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
        temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
                                 VOIDmode, modifier);
          rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                 VOIDmode, modifier);

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (GET_CODE (op1) == CONST_INT)
            return plus_constant (op0, - INTVAL (op1));
          else
            return gen_rtx (MINUS, mode, op0, op1);
        }
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        {
          tree negated = fold (build1 (NEGATE_EXPR, type,
                                       TREE_OPERAND (exp, 1)));

          /* Deal with the case where we can't negate the constant
             in TYPE.  */
          if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
            {
              tree newtype = signed_type (type);
              tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
              tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
              tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));

              if (! TREE_OVERFLOW (newneg))
                return expand_expr (convert (type,
                                             build (PLUS_EXPR, newtype,
                                                    newop0, newneg)),
                                    target, tmode, modifier);
            }
          else
            {
              exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
              goto plus_expr;
            }
        }
      this_optab = sub_optab;
      goto binop;
    case MULT_EXPR:
      preexpand_calls (exp);
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  register tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);

	  /* Apply distributive law if OP0 is x+c.  */
	  if (GET_CODE (op0) == PLUS
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    return gen_rtx (PLUS, mode,
			    gen_rtx (MULT, mode, XEXP (op0, 0),
				     GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
			    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
				     * INTVAL (XEXP (op0, 1))));

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx (MULT, mode, op0,
			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
	}
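
      /* Worked example (added commentary, not in the original source):
	 with OP0 = (plus (reg i) (const_int 2)) and a constant
	 multiplier of 4, the distributive law above yields

	     (plus (mult (reg i) (const_int 4)) (const_int 8))

	 which is exactly the shape an indexed address wants.  */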
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			       ? smul_widen_optab : umul_widen_optab);
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;

		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
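
      /* Illustration (added commentary, not in the original source): for
	 source like

	     short a, b;  int c = (int) a * (int) b;

	 the test above notices that both operands are widened from the
	 narrower HImode and emits a single widening multiply with an
	 SImode result, instead of two explicit extensions followed by a
	 full SImode multiply.  */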
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0)));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	  emit_move_insn (target, op1);
	}
      else
	{
	  if (code == MAX_EXPR)
	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
		    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
		    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
	  else
	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
		    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
		    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
	  if (temp == const0_rtx)
	    emit_move_insn (target, op1);
	  else if (temp != const_true_rtx)
	    {
	      if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
		emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
	      else
		abort ();
	      emit_move_insn (target, op1);
	    }
	}
      emit_label (op0);
      return target;
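
      /* Illustration (added commentary, not in the original source): on a
	 machine with no smax instruction, MAX_EXPR (a, b) comes out of
	 the fallback above as

	     target = a;  if (target >= b) goto L;  target = b;  L:

	 and cse can later merge the two references to A.  */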
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;
    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
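
      /* Illustration (added commentary, not in the original source): for
	 `c = f () && g ()', TRUTH_ANDIF_EXPR must skip g () entirely when
	 f () yields 0, whereas a TRUTH_AND_EXPR compiled here evaluates
	 both operands to 0-or-1 values and simply ands the bits, with no
	 branch.  */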
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  if (temp != original_target)
	    temp = copy_to_reg (temp);

	  op1 = gen_label_rtx ();
	  emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
			 GET_MODE (temp), unsignedp, 0);
	  emit_jump_insn (gen_beq (op1));
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;
    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, 0);
    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
	       && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
		  && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (true),
					       TREE_OPERAND (exp, 0),
					       true, false)),
				target, tmode, modifier);
	}
      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      /* If an arm of the branch requires a cleanup,
	 only that cleanup is performed.  */

      {
	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     modifier);
		return const0_rtx;
	      }

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }
	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
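
	/* Worked example (added commentary, not in the original source):
	   for `x ? a + 4 : a', SINGLETON becomes A (operand 2) and
	   BINARY_OP becomes the PLUS_EXPR `a + 4'; A can then be copied
	   to the output unconditionally and the addition performed only
	   on the taken branch.  */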
	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
		 && ! (GET_CODE (original_target) == MEM
		       && MEM_VOLATILE_P (original_target)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);
	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).

	       We have to invert the truth value here and then put it
	       back later if do_store_flag fails.  We cannot simply copy
	       TREE_OPERAND (exp, 0) to another variable and modify that
	       because invert_truthvalue can modify the tree pointed to
	       by its argument.  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));

	    result = do_store_flag (TREE_OPERAND (exp, 0),
				    (safe_from_p (temp, singleton)
				     ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	    else if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));
	  }
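
	/* Worked example (added commentary, not in the original source):
	   for `x != 0 ? a + 4 : a' on a machine with a store-flag
	   instruction, the code above computes

	       a + ((x != 0) << 2)

	   with no branches at all, since 4 is 1 << log2 (4).  */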
	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp, 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferal ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferal ();
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferal ();
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferal ();
	    if (temp != 0)
	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferal ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferal ();
	    if (temp != 0)
	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	end_cleanup_deferal ();

	emit_queue ();
	emit_label (op1);
	OK_DEFER_POP;

	return temp;
      }
    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which lays down in the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	if (target == 0)
	  {
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 1, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		DECL_RTL (slot) = target;

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not target that we were passed in, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      DECL_RTL (slot) = target;
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	store_expr (exp1, target, 0);

	expand_decl_cleanup (NULL_TREE, cleanups);

	return target;
      }
    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
	  noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_VALUE (noncopied_parts),
			       TREE_PURPOSE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }
    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a call.
	   If lhs is simple, compute it first so we can give it as a
	   target if the rhs is just a call.  This avoids an extra temp and copy
	   and that prevents a partial-subsumption which makes bad code.
	   Actually we could treat component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	if (TREE_CODE (lhs) != VAR_DECL
	    && TREE_CODE (lhs) != RESULT_DECL
	    && TREE_CODE (lhs) != PARM_DECL)
	  preexpand_calls (exp);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
	    && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
	    && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
	  noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_PURPOSE (noncopied_parts),
			       TREE_VALUE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }
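
      /* Illustration (added commentary, not in the original source): for
	 one-bit fields with the result unused, `s.a |= t.b;' becomes

	     if (t.b) s.a = 1;

	 a test and a conditional store, instead of a full
	 load-or-store read-modify-write sequence.  */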
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);
    case ADDR_EXPR:
      /* If nonzero, TEMP will be set to the address of something that might
	 be a MEM corresponding to a stack slot.  */
      temp = 0;

      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));
6937 to const0_rtx, so just return it. Don't get confused and
6938 think we are taking the address of the constant. */
6942 op0
= protect_from_queue (op0
, 0);
6944 /* We would like the object in memory. If it is a constant,
6945 we can have it be statically allocated into memory. For
6946 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6947 memory and store the value into it. */
6949 if (CONSTANT_P (op0
))
6950 op0
= force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0))),
6952 else if (GET_CODE (op0
) == MEM
)
6954 mark_temp_addr_taken (op0
);
6955 temp
= XEXP (op0
, 0);
6958 else if (GET_CODE (op0
) == REG
|| GET_CODE (op0
) == SUBREG
6959 || GET_CODE (op0
) == CONCAT
)
6961 /* If this object is in a register, it must be not
6963 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
6964 rtx memloc
= assign_temp (inner_type
, 1, 1, 1);
6966 mark_temp_addr_taken (memloc
);
6967 emit_move_insn (memloc
, op0
);
6971 if (GET_CODE (op0
) != MEM
)
6974 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
6976 temp
= XEXP (op0
, 0);
6977 #ifdef POINTERS_EXTEND_UNSIGNED
6978 if (GET_MODE (temp
) == Pmode
&& GET_MODE (temp
) != mode
6979 && mode
== ptr_mode
)
6980 temp
= convert_memory_address (ptr_mode
, temp
);
6985 op0
= force_operand (XEXP (op0
, 0), target
);
6988 if (flag_force_addr
&& GET_CODE (op0
) != REG
)
6989 op0
= force_reg (Pmode
, op0
);
6991 if (GET_CODE (op0
) == REG
6992 && ! REG_USERVAR_P (op0
))
6993 mark_reg_pointer (op0
, TYPE_ALIGN (TREE_TYPE (type
)) / BITS_PER_UNIT
);
6995 /* If we might have had a temp slot, add an equivalent address
6998 update_temp_slot_address (temp
, op0
);
7000 #ifdef POINTERS_EXTEND_UNSIGNED
7001 if (GET_MODE (op0
) == Pmode
&& GET_MODE (op0
) != mode
7002 && mode
== ptr_mode
)
7003 op0
= convert_memory_address (ptr_mode
, op0
);
    case ENTRY_VALUE_EXPR:
      abort ();

    /* COMPLEX type for Extended Pascal & Fortran.  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }
    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);
    case CONJ_EXPR:
      {
	enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx imag_t;
	rtx insns;

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (mode);

	start_sequence ();

	/* Store the realpart and the negated imagpart to target.  */
	emit_move_insn (gen_realpart (partmode, target),
			gen_realpart (partmode, op0));

	imag_t = gen_imagpart (partmode, target);
	temp = expand_unop (partmode, neg_optab,
			    gen_imagpart (partmode, op0), imag_t, 0);
	if (temp != imag_t)
	  emit_move_insn (imag_t, temp);

	insns = get_insns ();
	end_sequence ();

	/* Conjugate should appear as a single unit.
	   If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	else
	  emit_insns (insns);

	return target;
      }
    case TRY_CATCH_EXPR:
      {
	tree handler = TREE_OPERAND (exp, 1);

	expand_eh_region_start ();

	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	expand_eh_region_end (handler);

	return op0;
      }

    case POPDCC_EXPR:
      {
	rtx dcc = get_dynamic_cleanup_chain ();
	emit_move_insn (dcc, validize_mem (gen_rtx (MEM, Pmode, dcc)));
	return const0_rtx;
      }

    case POPDHC_EXPR:
      {
	rtx dhc = get_dynamic_handler_chain ();
	emit_move_insn (dhc, validize_mem (gen_rtx (MEM, Pmode, dhc)));
	return const0_rtx;
      }

    case ERROR_MARK:
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;

    default:
      return (*lang_expand_expr) (exp, original_target, tmode, modifier);
    }
  /* Here to do an ordinary binary operator, generating an instruction
     from the optab already placed in `this_optab'.  */
 binop:
  preexpand_calls (exp);
  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
    subtarget = 0;
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
 binop2:
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return temp;
}
/* Emit bytecode to evaluate the given expression EXP to the stack.  */

void
bc_expand_expr (exp)
     tree exp;
{
  enum tree_code code;
  tree type;
  rtx r;
  struct binary_operator *binoptab;
  struct unary_operator *unoptab;
  struct increment_operator *incroptab;
  struct bc_label *lab, *lab1;
  enum bytecode_opcode opcode;

  code = TREE_CODE (exp);

  switch (code)
    {
    case PARM_DECL:

      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return;
	}

      bc_load_parmaddr (DECL_RTL (exp));
      bc_load_memory (TREE_TYPE (exp), exp);

      return;

    case VAR_DECL:

      if (DECL_RTL (exp) == 0)
	abort ();

      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
      else if (TREE_PUBLIC (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));

      bc_load_memory (TREE_TYPE (exp), exp);
      return;
    case INTEGER_CST:

#ifdef DEBUG_PRINT_CODE
      fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
#endif
      bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
						    ? SImode
						    : TYPE_MODE (TREE_TYPE (exp)))],
			   (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
      return;

    case REAL_CST:

#ifdef DEBUG_PRINT_CODE
      fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
#endif
      /* FIX THIS: find a better way to pass real_cst's. -bson */
      bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
			   (double) TREE_REAL_CST (exp));
      return;
    case CALL_EXPR:
      {
	/* We build a call description vector describing the type of
	   the return value and of the arguments; this call vector,
	   together with a pointer to a location for the return value
	   and the base of the argument list, is passed to the low
	   level machine dependent call subroutine, which is responsible
	   for putting the arguments wherever real functions expect
	   them, as well as getting the return value back.  */

	tree calldesc = 0, arg;
	int nargs = 0;
	rtx retval;

	/* Push the evaluated args on the evaluation stack in reverse
	   order.  Also make an entry for each arg in the calldesc
	   vector while we're at it.  */

	TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));

	for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
	  {
	    ++nargs;
	    bc_expand_expr (TREE_VALUE (arg));

	    calldesc = tree_cons ((tree) 0,
				  size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
				  calldesc);
	    calldesc = tree_cons ((tree) 0,
				  bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
				  calldesc);
	  }

	TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));

	/* Allocate a location for the return value and push its
	   address on the evaluation stack.  Also make an entry
	   at the front of the calldesc for the return value type.  */

	type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
	bc_load_localaddr (retval);

	calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
	calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);

	/* Prepend the argument count.  */
	calldesc = tree_cons ((tree) 0,
			      build_int_2 (nargs, 0),
			      calldesc);

	/* Push the address of the call description vector on the stack.  */
	calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
	TREE_TYPE (calldesc) = build_array_type (integer_type_node,
						 build_index_type (build_int_2 (nargs * 2, 0)));
	r = output_constant_def (calldesc);
	bc_load_externaddr (r);

	/* Push the address of the function to be called.  */
	bc_expand_expr (TREE_OPERAND (exp, 0));

	/* Call the function, popping its address and the calldesc vector
	   address off the evaluation stack in the process.  */
	bc_emit_instruction (call);

	/* Pop the arguments off the stack.  */
	bc_adjust_stack (nargs);

	/* Load the return value onto the stack.  */
	bc_load_localaddr (retval);
	bc_load_memory (type, TREE_OPERAND (exp, 0));
      }
      return;
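
      /* Illustration (added commentary, not in the original source):
	 since each tree_cons above prepends to CALLDESC, the finished
	 vector begins with the argument count, followed by a
	 (type code, size) pair for the return value and one such pair
	 per argument, e.g. for `int f (char, double)' roughly

	     { 2, code(int), size(int), code(char), size(char),
		  code(double), size(double) }  */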
    case SAVE_EXPR:

      if (!SAVE_EXPR_RTL (exp))
	{
	  /* First time around: copy to local variable */
	  SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
						   TYPE_ALIGN (TREE_TYPE (exp)));
	  bc_expand_expr (TREE_OPERAND (exp, 0));
	  bc_emit_instruction (duplicate);

	  bc_load_localaddr (SAVE_EXPR_RTL (exp));
	  bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
	}
      else
	{
	  /* Consecutive reference: use saved copy */
	  bc_load_localaddr (SAVE_EXPR_RTL (exp));
	  bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
	}
      return;
      /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
	 how are they handled instead?  */
    case LET_STMT:

      TREE_USED (exp) = 1;
      bc_expand_expr (STMT_BODY (exp));
      return;

    case NOP_EXPR:
    case CONVERT_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
      return;

    case MODIFY_EXPR:

      expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
      return;

    case ADDR_EXPR:

      bc_expand_address (TREE_OPERAND (exp, 0));
      return;

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
      return;

    case ARRAY_REF:

      bc_expand_expr (bc_canonicalize_array_ref (exp));
      return;

    case COMPONENT_REF:

      bc_expand_component_address (exp);

      /* If we have a bitfield, generate a proper load */
      bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
      return;

    case COMPOUND_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_emit_instruction (drop);
      bc_expand_expr (TREE_OPERAND (exp, 1));
      return;

    case COND_EXPR:

      bc_expand_expr (TREE_OPERAND (exp, 0));
      bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
      lab = bc_get_bytecode_label ();
      bc_emit_bytecode (xjumpifnot);
      bc_emit_bytecode_labelref (lab);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif
      bc_expand_expr (TREE_OPERAND (exp, 1));
      lab1 = bc_get_bytecode_label ();
      bc_emit_bytecode (jump);
      bc_emit_bytecode_labelref (lab1);

#ifdef DEBUG_PRINT_CODE
      fputc ('\n', stderr);
#endif

      bc_emit_bytecode_labeldef (lab);
      bc_expand_expr (TREE_OPERAND (exp, 2));
      bc_emit_bytecode_labeldef (lab1);
      return;
    case TRUTH_ANDIF_EXPR:

      opcode = xjumpifnot;
      goto andorif;

    case TRUTH_ORIF_EXPR:

      opcode = xjumpif;
      goto andorif;

    case PLUS_EXPR:

      binoptab = optab_plus_expr;
      goto binop;

    case MINUS_EXPR:

      binoptab = optab_minus_expr;
      goto binop;

    case MULT_EXPR:

      binoptab = optab_mult_expr;
      goto binop;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:

      binoptab = optab_trunc_div_expr;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:

      binoptab = optab_trunc_mod_expr;
      goto binop;

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
      abort ();			/* FIXME */

    case RDIV_EXPR:

      binoptab = optab_rdiv_expr;
      goto binop;

    case BIT_AND_EXPR:

      binoptab = optab_bit_and_expr;
      goto binop;

    case BIT_IOR_EXPR:

      binoptab = optab_bit_ior_expr;
      goto binop;

    case BIT_XOR_EXPR:

      binoptab = optab_bit_xor_expr;
      goto binop;

    case LSHIFT_EXPR:

      binoptab = optab_lshift_expr;
      goto binop;

    case RSHIFT_EXPR:

      binoptab = optab_rshift_expr;
      goto binop;

    case TRUTH_AND_EXPR:

      binoptab = optab_truth_and_expr;
      goto binop;

    case TRUTH_OR_EXPR:

      binoptab = optab_truth_or_expr;
      goto binop;

    case LT_EXPR:

      binoptab = optab_lt_expr;
      goto binop;

    case LE_EXPR:

      binoptab = optab_le_expr;
      goto binop;

    case GE_EXPR:

      binoptab = optab_ge_expr;
      goto binop;

    case GT_EXPR:

      binoptab = optab_gt_expr;
      goto binop;

    case EQ_EXPR:

      binoptab = optab_eq_expr;
      goto binop;

    case NE_EXPR:

      binoptab = optab_ne_expr;
      goto binop;

    case NEGATE_EXPR:

      unoptab = optab_negate_expr;
      goto unop;

    case BIT_NOT_EXPR:

      unoptab = optab_bit_not_expr;
      goto unop;

    case TRUTH_NOT_EXPR:

      unoptab = optab_truth_not_expr;
      goto unop;

    case PREDECREMENT_EXPR:

      incroptab = optab_predecrement_expr;
      goto increment;

    case PREINCREMENT_EXPR:

      incroptab = optab_preincrement_expr;
      goto increment;

    case POSTDECREMENT_EXPR:

      incroptab = optab_postdecrement_expr;
      goto increment;

    case POSTINCREMENT_EXPR:

      incroptab = optab_postincrement_expr;
      goto increment;
    case CONSTRUCTOR:

      bc_expand_constructor (exp);
      return;

    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (0);

	/* Mark the corresponding BLOCK for output.  */
	if (TREE_OPERAND (exp, 2) != 0)
	  TREE_USED (TREE_OPERAND (exp, 2)) = 1;

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	bc_expand_expr (TREE_OPERAND (exp, 1));

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
      }
      return;
    default:
      abort ();
    }

 binop:

  bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
			      TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
  return;

 unop:

  bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
  return;

 andorif:

  bc_expand_expr (TREE_OPERAND (exp, 0));
  bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
  lab = bc_get_bytecode_label ();

  bc_emit_instruction (duplicate);
  bc_emit_bytecode (opcode);
  bc_emit_bytecode_labelref (lab);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif

  bc_emit_instruction (drop);

  bc_expand_expr (TREE_OPERAND (exp, 1));
  bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
  bc_emit_bytecode_labeldef (lab);
  return;

 increment:

  type = TREE_TYPE (TREE_OPERAND (exp, 0));

  /* Push the quantum.  */
  bc_expand_expr (TREE_OPERAND (exp, 1));

  /* Convert it to the lvalue's type.  */
  bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);

  /* Push the address of the lvalue */
  bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));

  /* Perform actual increment */
  bc_expand_increment (incroptab, type);
  return;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;
	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
	    return align;

	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
		  & (align - 1))
		 != 0)
	    align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
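
/* Illustration (added commentary, not in the original source): given
   `double d;', the expression `(char *) &d + 4' reaches the PLUS_EXPR
   case above with the 64-bit alignment of &d, and the shift loop
   reduces it to the 32-bit alignment implied by the constant offset
   of 4 bytes.  */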
/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */

static tree
string_constant (arg, ptr_offset)
     tree arg;
     tree *ptr_offset;
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = integer_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = arg1;
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = arg0;
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;
  int i;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  */
      /* This would perhaps not be valid if we were dealing with named
	 arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
	return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}
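
/* Illustration (added commentary, not in the original source): c_strlen
   folds `strlen ("hello" + 2)' to 3 via the known-offset path at the
   end, while `strlen ("hello" + i)' with unknown I folds to 5 - i only
   because "hello" contains no embedded zero byte.  */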
rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx (MEM, Pmode, tem);
#endif
  return tem;
}
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

rtx
expand_builtin_setjmp (buf_addr, target)
     rtx buf_addr;
     rtx target;
{
  rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
  enum machine_mode sa_mode = Pmode, value_mode;
  rtx stack_save;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int return_pops
    = RETURN_POPS_ARGS (get_identifier ("__dummy"),
			build_function_type (void_type_node, NULL_TREE),
			0);
  rtx next_arg_reg;
  CUMULATIVE_ARGS args_so_far;
  rtx op0;
  int i;

  value_mode = TYPE_MODE (integer_type_node);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, buf_addr);

  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (value_mode);

  emit_queue ();

  CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
  current_function_calls_setjmp = 1;

  /* We store the frame pointer and the address of lab1 in the buffer
     and use the rest of it for the stack save area, which is
     machine-dependent.  */
  emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
		  virtual_stack_vars_rtx);
  emit_move_insn
    (validize_mem (gen_rtx (MEM, Pmode,
			    plus_constant (buf_addr,
					   GET_MODE_SIZE (Pmode)))),
     gen_rtx (LABEL_REF, Pmode, lab1));

#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
#endif

  stack_save = gen_rtx (MEM, sa_mode,
			plus_constant (buf_addr,
				       2 * GET_MODE_SIZE (Pmode)));
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  /* Set TARGET to zero and branch around the other case.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (lab2));
  emit_barrier ();
  emit_label (lab1);

  /* Note that setjmp clobbers FP when we get here, so we have to make
     sure it's marked as used by this function.  */
  emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

  /* Do we need to do something like:

     current_function_has_nonlocal_label = 1;

     here?  It seems like we might have to, or some subset of that
     functionality, but I am unsure.  (mrs)  */
#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == sizeof elim_regs / sizeof elim_regs[0])
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.
	     If there hasn't been space allocated for it yet, make
	     some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (arg_pointer_save_area));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* The static chain pointer contains the address of dummy function.
     We need to call it here to handle some PIC cases of restoring a
     global pointer.  Then return 1.  */
  op0 = copy_to_mode_reg (Pmode, static_chain_rtx);

  /* We can't actually call emit_library_call here, so do everything
     it does, which isn't much for a libfunc with no args.  */
  op0 = memory_address (FUNCTION_MODE, op0);

  INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
			gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
  next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);

#ifndef ACCUMULATE_OUTGOING_ARGS
#ifdef HAVE_call_pop
  if (HAVE_call_pop)
    emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
				  const0_rtx, next_arg_reg,
				  GEN_INT (return_pops)));
  else
#endif
#endif
    emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
			      const0_rtx, next_arg_reg, const0_rtx));

  /* Set TARGET to one.  */
  emit_move_insn (target, const1_rtx);
  emit_label (lab2);
  return target;
}
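
/* Illustration (added commentary, not in the original source): the
   five-word buffer is laid out by the code above as

       buf[0]   frame pointer of the setjmp caller
       buf[1]   address of lab1, where longjmp resumes execution
       buf[2..] machine-dependent stack save area

   and TARGET distinguishes the two returns: 0 on the direct fall
   through to lab2, 1 when control re-enters at lab1.  */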
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

#define CALLED_AS_BUILT_IN(NODE) \
   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))

static rtx
expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget;
     enum machine_mode mode;
     int ignore;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  rtx op0;
  rtx lab1, insns;
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
  optab builtin_optab;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_FABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_SIN:
    case BUILT_IN_COS:
      /* Treat these like sqrt, but only if the user asks for them.  */
      if (! flag_fast_math)
	break;
    case BUILT_IN_FSQRT:
      /* If not optimizing, call the library function.  */
      if (! optimize)
	break;

      if (arglist == 0
	  /* Arg could be wrong type if user redeclared this fcn wrong.  */
	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
	break;

      /* Stabilize and compute the argument.  */
      if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
	  && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
	{
	  exp = copy_node (exp);
	  arglist = copy_node (arglist);
	  TREE_OPERAND (exp, 1) = arglist;
	  TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
	}
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

      /* Make a suitable register to place result in.  */
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      emit_queue ();
      start_sequence ();

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_SIN:
	  builtin_optab = sin_optab; break;
	case BUILT_IN_COS:
	  builtin_optab = cos_optab; break;
	case BUILT_IN_FSQRT:
	  builtin_optab = sqrt_optab; break;
	default:
	  abort ();
	}

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			    builtin_optab, op0, target, 0);

      /* If we were unable to expand via the builtin, stop the
	 sequence (without outputting the insns) and break, causing
	 a call to the library function.  */
      if (target == 0)
	{
	  end_sequence ();
	  break;
	}

      /* Check the results by default.  But if flag_fast_math is turned on,
	 then assume sqrt will always be called with valid arguments.  */

      if (! flag_fast_math)
	{
	  /* Don't define the builtin FP instructions
	     if your machine is not IEEE.  */
	  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	    abort ();

	  lab1 = gen_label_rtx ();

	  /* Test the result; if it is NaN, set errno=EDOM because
	     the argument was not in the domain.  */
	  emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
	  emit_jump_insn (gen_beq (lab1));

	  {
#ifdef GEN_ERRNO_RTX
	    rtx errno_rtx = GEN_ERRNO_RTX;
#else
	    rtx errno_rtx
	      = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
#endif

	    emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
	  }

	  /* We can't set errno=EDOM directly; let the library call do it.
	     Pop the arguments right away in case the call gets deleted.  */
	  NO_DEFER_POP;
	  expand_call (exp, target, 0);
	  OK_DEFER_POP;
	  emit_label (lab1);
	}

      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insns (insns);

      return target;
    /* __builtin_apply_args returns block of memory allocated on
       the stack into which is stored the arg pointer, structure
       value address, static chain, and all the registers that might
       possibly be used in performing a function call.  The code is
       moved to the start of the function so the incoming values are
       saved.  */
    case BUILT_IN_APPLY_ARGS:
      /* Don't do __builtin_apply_args more than once in a function.
         Save the result of the first call and reuse it.  */
      if (apply_args_value != 0)
        return apply_args_value;
      {
        /* When this function is called, it means that registers must be
           saved on entry to this function.  So we migrate the
           call to the first insn of this function.  */
        rtx temp;
        rtx seq;

        start_sequence ();
        temp = expand_builtin_apply_args ();
        seq = get_insns ();
        end_sequence ();

        apply_args_value = temp;

        /* Put the sequence after the NOTE that starts the function.
           If this is inside a SEQUENCE, make the outer-level insn
           chain current, so the code is placed at the start of the
           function.  */
        push_topmost_sequence ();
        emit_insns_before (seq, NEXT_INSN (get_insns ()));
        pop_topmost_sequence ();
        return temp;
      }
    /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
       FUNCTION with a copy of the parameters described by
       ARGUMENTS, and ARGSIZE.  It returns a block of memory
       allocated on the stack into which is stored all the registers
       that might possibly be used for returning the result of a
       function.  ARGUMENTS is the value returned by
       __builtin_apply_args.  ARGSIZE is the number of bytes of
       arguments that must be copied.  ??? How should this value be
       computed?  We'll also need a safe worst case value for varargs
       functions.  */
    case BUILT_IN_APPLY:
      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
        return const0_rtx;
      else
        {
          int i;
          tree t;
          rtx ops[3];

          for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
            ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

          return expand_builtin_apply (ops[0], ops[1], ops[2]);
        }
    /* __builtin_return (RESULT) causes the function to return the
       value described by RESULT.  RESULT is address of the block of
       memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (arglist
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
        expand_builtin_return (expand_expr (TREE_VALUE (arglist),
                                            NULL_RTX, VOIDmode, 0));
      return const0_rtx;
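
      /* Illustrative sketch only (not part of GNU CC): how the three
         builtins above combine to forward an untyped call from user
         code, e.g. in a dispatch stub.  `target_fn' is hypothetical,
         and the worst-case ARGSIZE of 64 is an arbitrary guess, per
         the ??? comment above about how that value should be computed.  */
#if 0
      extern void (*target_fn) ();

      void forwarder ()
      {
        void *args = __builtin_apply_args ();
        void *result = __builtin_apply (target_fn, args, 64);
        __builtin_return (result);
      }
#endif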
    case BUILT_IN_SAVEREGS:
      /* Don't do __builtin_saveregs more than once in a function.
         Save the result of the first call and reuse it.  */
      if (saveregs_value != 0)
        return saveregs_value;
      {
        /* When this function is called, it means that registers must be
           saved on entry to this function.  So we migrate the
           call to the first insn of this function.  */
        rtx temp;
        rtx seq;

        /* Now really call the function.  `expand_call' does not call
           expand_builtin, so there is no danger of infinite recursion here.  */
        start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
        /* Do whatever the machine needs done in this case.  */
        temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
        /* The register where the function returns its value
           is likely to have something else in it, such as an argument.
           So preserve that register around the call.  */

        if (value_mode != VOIDmode)
          {
            rtx valreg = hard_libcall_value (value_mode);
            rtx saved_valreg = gen_reg_rtx (value_mode);

            emit_move_insn (saved_valreg, valreg);
            temp = expand_call (exp, target, ignore);
            emit_move_insn (valreg, saved_valreg);
          }
        else
          /* Generate the call, putting the value in a pseudo.  */
          temp = expand_call (exp, target, ignore);
#endif

        seq = get_insns ();
        end_sequence ();

        saveregs_value = temp;

        /* Put the sequence after the NOTE that starts the function.
           If this is inside a SEQUENCE, make the outer-level insn
           chain current, so the code is placed at the start of the
           function.  */
        push_topmost_sequence ();
        emit_insns_before (seq, NEXT_INSN (get_insns ()));
        pop_topmost_sequence ();
        return temp;
      }
    /* __builtin_args_info (N) returns word N of the arg space info
       for the current function.  The number and meanings of words
       is controlled by the definition of CUMULATIVE_ARGS.  */
    case BUILT_IN_ARGS_INFO:
      {
        int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
        int i;
        int *word_ptr = (int *) &current_function_args_info;
        tree type, elts, result;

        if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
          fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
                 __FILE__, __LINE__);

        if (arglist != 0)
          {
            tree arg = TREE_VALUE (arglist);
            if (TREE_CODE (arg) != INTEGER_CST)
              error ("argument of `__builtin_args_info' must be constant");
            else
              {
                int wordnum = TREE_INT_CST_LOW (arg);

                if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
                  error ("argument of `__builtin_args_info' out of range");
                else
                  return GEN_INT (word_ptr[wordnum]);
              }
          }
        else
          error ("missing argument in `__builtin_args_info'");

        return const0_rtx;

#if 0
        for (i = 0; i < nwords; i++)
          elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));

        type = build_array_type (integer_type_node,
                                 build_index_type (build_int_2 (nwords, 0)));
        result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
        TREE_CONSTANT (result) = 1;
        TREE_STATIC (result) = 1;
        result = build (INDIRECT_REF, build_pointer_type (type), result);
        TREE_CONSTANT (result) = 1;
        return expand_expr (result, NULL_RTX, VOIDmode, 0);
#endif
      }
    /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      {
        tree fntype = TREE_TYPE (current_function_decl);

        if ((TYPE_ARG_TYPES (fntype) == 0
             || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
                 == void_type_node))
            && ! current_function_varargs)
          {
            error ("`va_start' used in function with fixed args");
            return const0_rtx;
          }

        if (arglist)
          {
            tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
            tree arg = TREE_VALUE (arglist);

            /* Strip off all nops for the sake of the comparison.  This
               is not quite the same as STRIP_NOPS.  It does more.
               We must also strip off INDIRECT_EXPR for C++ reference
               parameters.  */
            while (TREE_CODE (arg) == NOP_EXPR
                   || TREE_CODE (arg) == CONVERT_EXPR
                   || TREE_CODE (arg) == NON_LVALUE_EXPR
                   || TREE_CODE (arg) == INDIRECT_REF)
              arg = TREE_OPERAND (arg, 0);
            if (arg != last_parm)
              warning ("second parameter of `va_start' not last named argument");
          }
        else if (! current_function_varargs)
          /* Evidently an out of date version of <stdarg.h>; can't validate
             va_start's second argument, but can still work as intended.  */
          warning ("`__builtin_next_arg' called without an argument");
      }
      return expand_binop (Pmode, add_optab,
                           current_function_internal_arg_pointer,
                           current_function_arg_offset_rtx,
                           NULL_RTX, 0, OPTAB_LIB_WIDEN);
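
      /* Illustrative sketch only (not part of GNU CC): the usual client
         of BUILT_IN_NEXT_ARG is <stdarg.h>, whose va_start typically
         expands to __builtin_next_arg-based code, e.g.:  */
#if 0
      int sum (int count, ...)
      {
        va_list ap;
        int i, total = 0;
        va_start (ap, count);	/* second arg must be the last named parm */
        for (i = 0; i < count; i++)
          total += va_arg (ap, int);
        va_end (ap);
        return total;
      }
#endif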
    case BUILT_IN_CLASSIFY_TYPE:
      if (arglist != 0)
        {
          tree type = TREE_TYPE (TREE_VALUE (arglist));
          enum tree_code code = TREE_CODE (type);
          if (code == VOID_TYPE)
            return GEN_INT (void_type_class);
          if (code == INTEGER_TYPE)
            return GEN_INT (integer_type_class);
          if (code == CHAR_TYPE)
            return GEN_INT (char_type_class);
          if (code == ENUMERAL_TYPE)
            return GEN_INT (enumeral_type_class);
          if (code == BOOLEAN_TYPE)
            return GEN_INT (boolean_type_class);
          if (code == POINTER_TYPE)
            return GEN_INT (pointer_type_class);
          if (code == REFERENCE_TYPE)
            return GEN_INT (reference_type_class);
          if (code == OFFSET_TYPE)
            return GEN_INT (offset_type_class);
          if (code == REAL_TYPE)
            return GEN_INT (real_type_class);
          if (code == COMPLEX_TYPE)
            return GEN_INT (complex_type_class);
          if (code == FUNCTION_TYPE)
            return GEN_INT (function_type_class);
          if (code == METHOD_TYPE)
            return GEN_INT (method_type_class);
          if (code == RECORD_TYPE)
            return GEN_INT (record_type_class);
          if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
            return GEN_INT (union_type_class);
          if (code == ARRAY_TYPE)
            {
              if (TYPE_STRING_FLAG (type))
                return GEN_INT (string_type_class);
              else
                return GEN_INT (array_type_class);
            }
          if (code == SET_TYPE)
            return GEN_INT (set_type_class);
          if (code == FILE_TYPE)
            return GEN_INT (file_type_class);
          if (code == LANG_TYPE)
            return GEN_INT (lang_type_class);
        }
      return GEN_INT (no_type_class);
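
      /* Illustrative examples only (not part of GNU CC); the returned
         values are the *_type_class codes from typeclass.h, assuming the
         usual C front end behavior:
             __builtin_classify_type (0)    => integer_type_class
             __builtin_classify_type (0.0)  => real_type_class
             __builtin_classify_type ("")   => pointer_type_class, since
                                               an array argument decays to
                                               a pointer in a call.  */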
    case BUILT_IN_CONSTANT_P:
      if (arglist == 0)
        return const0_rtx;
      else
        {
          tree arg = TREE_VALUE (arglist);

          STRIP_NOPS (arg);
          return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
                  || (TREE_CODE (arg) == ADDR_EXPR
                      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
                  ? const1_rtx : const0_rtx);
        }
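
      /* Illustrative examples only (not part of GNU CC), following the
         test just above:
             __builtin_constant_p (42)   => 1  ('c' class constant node)
             __builtin_constant_p ("x")  => 1  (ADDR_EXPR of a STRING_CST)
             __builtin_constant_p (n)    => 0  for a variable n, even if n
                                            would fold to a constant later.  */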
    case BUILT_IN_FRAME_ADDRESS:
      /* The argument must be a nonnegative integer constant.
         It counts the number of frames to scan up the stack.
         The value is the address of that frame.  */
    case BUILT_IN_RETURN_ADDRESS:
      /* The argument must be a nonnegative integer constant.
         It counts the number of frames to scan up the stack.
         The value is the return address saved in that frame.  */
      if (arglist == 0)
        /* Warning about missing arg was already issued.  */
        return const0_rtx;
      else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
        {
          error ("invalid arg to `__builtin_return_address'");
          return const0_rtx;
        }
      else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
        {
          error ("invalid arg to `__builtin_return_address'");
          return const0_rtx;
        }
      else
        {
          rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
                                                TREE_INT_CST_LOW (TREE_VALUE (arglist)),
                                                hard_frame_pointer_rtx);

          /* For __builtin_frame_address, return what we've got.  */
          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
            return tem;

          if (GET_CODE (tem) != REG)
            tem = copy_to_reg (tem);
          return tem;
        }
    case BUILT_IN_ALLOCA:
      if (arglist == 0
          /* Arg could be non-integer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
        break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

      /* Allocate the desired space.  */
      return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);

    case BUILT_IN_FFS:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-integer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
        break;

      /* Compute the argument.  */
      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
      /* Compute ffs, into TARGET if possible.
         Set TARGET to wherever the result comes back.  */
      target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
                            ffs_optab, op0, target, 1);
      if (target == 0)
        abort ();
      return target;
    case BUILT_IN_STRLEN:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
        break;
      else
        {
          tree src = TREE_VALUE (arglist);
          tree len = c_strlen (src);

          int align
            = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

          rtx result, src_rtx, char_rtx;
          enum machine_mode insn_mode = value_mode, char_mode;
          enum insn_code icode;

          /* If the length is known, just return it.  */
          if (len != 0)
            return expand_expr (len, target, mode, 0);

          /* If SRC is not a pointer type, don't do this operation inline.  */
          if (align == 0)
            break;

          /* Call a function if we can't compute strlen in the right mode.  */
          while (insn_mode != VOIDmode)
            {
              icode = strlen_optab->handlers[(int) insn_mode].insn_code;
              if (icode != CODE_FOR_nothing)
                break;

              insn_mode = GET_MODE_WIDER_MODE (insn_mode);
            }
          if (insn_mode == VOIDmode)
            break;

          /* Make a place to write the result of the instruction.  */
          result = target;
          if (! (result != 0
                 && GET_CODE (result) == REG
                 && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          /* Make sure the operands are acceptable to the predicates.  */

          if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
            result = gen_reg_rtx (insn_mode);

          src_rtx = memory_address (BLKmode,
                                    expand_expr (src, NULL_RTX, ptr_mode,
                                                 EXPAND_NORMAL));
          if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
            src_rtx = copy_to_mode_reg (Pmode, src_rtx);

          char_rtx = const0_rtx;
          char_mode = insn_operand_mode[(int)icode][2];
          if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
            char_rtx = copy_to_mode_reg (char_mode, char_rtx);

          emit_insn (GEN_FCN (icode) (result,
                                      gen_rtx (MEM, BLKmode, src_rtx),
                                      char_rtx, GEN_INT (align)));

          /* Return the value in the proper mode for this function.  */
          if (GET_MODE (result) == value_mode)
            return result;
          else if (target != 0)
            {
              convert_move (target, result, 0);
              return target;
            }
          else
            return convert_to_mode (value_mode, result, 0);
        }
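
      /* Illustrative sketch only (not part of GNU CC): when c_strlen can
         see the string bytes, no strlen insn or library call is emitted
         at all; the "If the length is known" path above returns a
         compile-time constant, e.g.:  */
#if 0
      int example_strlen ()
      {
        return __builtin_strlen ("hello");	/* expands to the constant 5 */
      }
#endif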
    case BUILT_IN_STRCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
        break;
      else
        {
          tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

          if (len == 0)
            break;

          len = size_binop (PLUS_EXPR, len, integer_one_node);

          chainon (arglist, build_tree_list (NULL_TREE, len));
        }

      /* Drops in.  */
    case BUILT_IN_MEMCPY:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
        break;
      else
        {
          tree dest = TREE_VALUE (arglist);
          tree src = TREE_VALUE (TREE_CHAIN (arglist));
          tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
          tree type;

          int src_align
            = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          int dest_align
            = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          rtx dest_rtx, dest_mem, src_mem;

          /* If either SRC or DEST is not a pointer type, don't do
             this operation in-line.  */
          if (src_align == 0 || dest_align == 0)
            {
              if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
                TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
              break;
            }

          dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
          dest_mem = gen_rtx (MEM, BLKmode,
                              memory_address (BLKmode, dest_rtx));
          /* There could be a void* cast on top of the object.  */
          while (TREE_CODE (dest) == NOP_EXPR)
            dest = TREE_OPERAND (dest, 0);
          type = TREE_TYPE (TREE_TYPE (dest));
          MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
          src_mem = gen_rtx (MEM, BLKmode,
                             memory_address (BLKmode,
                                             expand_expr (src, NULL_RTX,
                                                          ptr_mode,
                                                          EXPAND_SUM)));
          /* There could be a void* cast on top of the object.  */
          while (TREE_CODE (src) == NOP_EXPR)
            src = TREE_OPERAND (src, 0);
          type = TREE_TYPE (TREE_TYPE (src));
          MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);

          /* Copy word part most expediently.  */
          emit_block_move (dest_mem, src_mem,
                           expand_expr (len, NULL_RTX, VOIDmode, 0),
                           MIN (src_align, dest_align));
          return force_operand (dest_rtx, NULL_RTX);
        }
    case BUILT_IN_MEMSET:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
              != INTEGER_TYPE)
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || (INTEGER_TYPE
              != (TREE_CODE (TREE_TYPE
                             (TREE_VALUE
                              (TREE_CHAIN (TREE_CHAIN (arglist))))))))
        break;
      else
        {
          tree dest = TREE_VALUE (arglist);
          tree val = TREE_VALUE (TREE_CHAIN (arglist));
          tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
          tree type;

          int dest_align
            = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
          rtx dest_rtx, dest_mem;

          /* If DEST is not a pointer type, don't do this
             operation in-line.  */
          if (dest_align == 0)
            break;

          /* If VAL is not 0, don't do this operation in-line.  */
          if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
            break;

          dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
          dest_mem = gen_rtx (MEM, BLKmode,
                              memory_address (BLKmode, dest_rtx));
          /* There could be a void* cast on top of the object.  */
          while (TREE_CODE (dest) == NOP_EXPR)
            dest = TREE_OPERAND (dest, 0);
          type = TREE_TYPE (TREE_TYPE (dest));
          MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);

          clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
                         dest_align);
          return force_operand (dest_rtx, NULL_RTX);
        }
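
      /* Illustrative sketch only (not part of GNU CC): because of the
         const0_rtx test above, only a zero fill is expanded inline (via
         clear_storage); any other fill value falls back to the library:  */
#if 0
      void example_memset (char *p)
      {
        __builtin_memset (p, 0, 32);	/* may be expanded inline */
        __builtin_memset (p, 1, 32);	/* library call */
      }
#endif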
    /* These comparison functions need an instruction that returns an actual
       index.  An ordinary compare that just sets the condition codes
       is not enough.  */
#ifdef HAVE_cmpstrsi
    case BUILT_IN_STRCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
        break;
      else if (!HAVE_cmpstrsi)
        break;
      {
        tree arg1 = TREE_VALUE (arglist);
        tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
        tree len, len2;

        len = c_strlen (arg1);
        if (len)
          len = size_binop (PLUS_EXPR, integer_one_node, len);
        len2 = c_strlen (arg2);
        if (len2)
          len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

        /* If we don't have a constant length for the first, use the length
           of the second, if we know it.  We don't require a constant for
           this case; some cost analysis could be done if both are available
           but neither is constant.  For now, assume they're equally cheap.

           If both strings have constant lengths, use the smaller.  This
           could arise if optimization results in strcpy being called with
           two fixed strings, or if the code was machine-generated.  We should
           add some code to the `memcmp' handler below to deal with such
           situations, someday.  */
        if (!len || TREE_CODE (len) != INTEGER_CST)
          {
            len = len2;
            if (len == 0)
              break;
          }
        else if (len2 && TREE_CODE (len2) == INTEGER_CST)
          {
            if (tree_int_cst_lt (len2, len))
              len = len2;
          }

        chainon (arglist, build_tree_list (NULL_TREE, len));
      }

      /* Drops in.  */
    case BUILT_IN_MEMCMP:
      /* If not optimizing, call the library function.  */
      if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
        break;

      if (arglist == 0
          /* Arg could be non-pointer if user redeclared this fcn wrong.  */
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
          || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
          || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
        break;
      else if (!HAVE_cmpstrsi)
        break;
      {
        tree arg1 = TREE_VALUE (arglist);
        tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
        tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
        rtx result;

        int arg1_align
          = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
        int arg2_align
          = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
        enum machine_mode insn_mode
          = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];

        /* If we don't have POINTER_TYPE, call the function.  */
        if (arg1_align == 0 || arg2_align == 0)
          {
            if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
              TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
            break;
          }

        /* Make a place to write the result of the instruction.  */
        result = target;
        if (! (result != 0
               && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
               && REGNO (result) >= FIRST_PSEUDO_REGISTER))
          result = gen_reg_rtx (insn_mode);

        emit_insn (gen_cmpstrsi (result,
                                 gen_rtx (MEM, BLKmode,
                                          expand_expr (arg1, NULL_RTX,
                                                       ptr_mode,
                                                       EXPAND_NORMAL)),
                                 gen_rtx (MEM, BLKmode,
                                          expand_expr (arg2, NULL_RTX,
                                                       ptr_mode,
                                                       EXPAND_NORMAL)),
                                 expand_expr (len, NULL_RTX, VOIDmode, 0),
                                 GEN_INT (MIN (arg1_align, arg2_align))));

        /* Return the value in the proper mode for this function.  */
        mode = TYPE_MODE (TREE_TYPE (exp));
        if (GET_MODE (result) == mode)
          return result;
        else if (target != 0)
          {
            convert_move (target, result, 0);
            return target;
          }
        else
          return convert_to_mode (mode, result, 0);
      }
#else
    case BUILT_IN_STRCMP:
    case BUILT_IN_MEMCMP:
      break;
#endif

    case BUILT_IN_SETJMP:
      if (arglist == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
        break;
      {
        rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
                                    VOIDmode, 0);
        return expand_builtin_setjmp (buf_addr, target);
      }
    /* __builtin_longjmp is passed a pointer to an array of five words
       and a value, which is a dummy.  It's similar to the C library longjmp
       function but works with __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (arglist == 0 || TREE_CHAIN (arglist) == 0
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
        break;
      {
        tree dummy_id = get_identifier ("__dummy");
        tree dummy_type = build_function_type (void_type_node, NULL_TREE);
        tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
#ifdef POINTERS_EXTEND_UNSIGNED
        rtx buf_addr
          = force_reg (Pmode,
                       convert_memory_address
                       (Pmode,
                        expand_expr (TREE_VALUE (arglist),
                                     NULL_RTX, VOIDmode, 0)));
#else
        rtx buf_addr
          = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
                                           NULL_RTX, VOIDmode, 0));
#endif
        rtx fp = gen_rtx (MEM, Pmode, buf_addr);
        rtx lab = gen_rtx (MEM, Pmode,
                           plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
        enum machine_mode sa_mode
#ifdef HAVE_save_stack_nonlocal
          = (HAVE_save_stack_nonlocal
             ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
             : Pmode);
#else
          = Pmode;
#endif
        rtx stack = gen_rtx (MEM, sa_mode,
                             plus_constant (buf_addr,
                                            2 * GET_MODE_SIZE (Pmode)));

        DECL_EXTERNAL (dummy_decl) = 1;
        TREE_PUBLIC (dummy_decl) = 1;
        make_decl_rtl (dummy_decl, NULL_PTR, 1);

        /* Expand the second expression just for side-effects.  */
        expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
                     const0_rtx, VOIDmode, 0);

        assemble_external (dummy_decl);

        /* Pick up FP, label, and SP from the block and jump.  This code is
           from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
        if (HAVE_nonlocal_goto)
          emit_insn (gen_nonlocal_goto (fp, lab, stack,
                                        XEXP (DECL_RTL (dummy_decl), 0)));
        else
#endif
          {
            lab = copy_to_reg (lab);
            emit_move_insn (hard_frame_pointer_rtx, fp);
            emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

            /* Put in the static chain register the address of the dummy
               function.  */
            emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
            emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
            emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
            emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
            emit_indirect_jump (lab);
          }
      }
      break;
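
      /* Illustrative sketch only (not part of GNU CC): the buffer really
         is five words, as the comment above says; FP, the label, and SP
         are fetched from it at Pmode-sized offsets.  The second argument
         of __builtin_longjmp is the dummy mentioned above:  */
#if 0
      void *jmpbuf[5];

      void example ()
      {
        if (__builtin_setjmp (jmpbuf) == 0)
          __builtin_longjmp (jmpbuf, 1);	/* resumes after the setjmp */
      }
#endif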
    default:			/* just do library call, if unknown builtin */
      error ("built-in function `%s' not currently supported",
             IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];

/* Return the offset of register REGNO into the block returned by
   __builtin_apply_args.  This is not declared static, since it is
   needed in objc-act.c.  */

int
apply_args_register_offset (regno)
     int regno;
{
  apply_args_size ();

  /* Arguments are always put in outgoing registers (in the argument
     block) if such make sense.  */
#ifdef OUTGOING_REGNO
  regno = OUTGOING_REGNO (regno);
#endif
  return apply_args_reg_offset[regno];
}
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (struct_value_rtx)
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode != VOIDmode;
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode)
                  && HARD_REGNO_NREGS (regno, mode) == 1)
                best_mode = mode;

            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && (mov_optab->handlers[(int) mode].insn_code
                        != CODE_FOR_nothing))
                  best_mode = mode;

            mode = best_mode;
            if (mode == VOIDmode)
              abort ();

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            apply_args_reg_offset[regno] = size;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
            apply_args_reg_offset[regno] = 0;
          }
    }
  return size;
}
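
/* A worked example of the CEIL rounding above (illustrative only, with
   an assumed 32-bit Pmode and an 8-byte-aligned DFmode register): after
   the arg-pointer slot, size is 4; align is 8, so size becomes
   CEIL (4, 8) * 8 == 8, the register is recorded at offset 8, and size
   advances to 16 for the next register.  */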
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_VALUE_REGNO_P (regno))
          {
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode != TImode;
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode))
                best_mode = mode;

            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && (mov_optab->handlers[(int) mode].insn_code
                        != CODE_FOR_nothing))
                  best_mode = mode;

            mode = best_mode;
            if (mode == VOIDmode)
              abort ();

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
        mem = change_address (result, mode,
                              plus_constant (XEXP (result, 0), size));
        savevec[nelts++] = (savep
                            ? gen_rtx (SET, VOIDmode, mem, reg)
                            : gen_rtx (SET, VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        rtx tem;

        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));

#ifdef STACK_REGS
        /* For reg-stack.c's stack register household.
           Compare with a similar piece of code in function.c.  */

        emit_insn (gen_rtx (USE, mode, tem));
#endif

        emit_move_insn (change_address (registers, mode,
                                        plus_constant (XEXP (registers, 0),
                                                       size)),
                        tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
                  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
                                      plus_constant (XEXP (registers, 0),
                                                     size)),
                      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
                  gen_rtx (MEM, Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
                                incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros affect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = copy_addr_to_reg (push_block (argsize, 0, 0));
  emit_block_move (gen_rtx (MEM, BLKmode, dest),
                   gen_rtx (MEM, BLKmode, incoming_args),
                   argsize,
                   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx (MEM, BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx (REG, mode, regno);
        emit_move_insn (reg,
                        change_address (arguments, mode,
                                        plus_constant (XEXP (arguments, 0),
                                                       size)));

        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
                      change_address (arguments, Pmode,
                                      plus_constant (XEXP (arguments, 0),
                                                     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
        use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            if (valreg)
              abort (); /* HAVE_untyped_call required.  */
            valreg = gen_rtx (REG, mode, regno);
          }

      emit_call_insn (gen_call_value (valreg,
                                      gen_rtx (MEM, FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
                                      XEXP (result, 0)),
                      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (call_insn == 0)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
/* Perform an untyped return.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  apply_result_size ();
  result = gen_rtx (MEM, BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
        emit_move_insn (reg,
                        change_address (result, mode,
                                        plus_constant (XEXP (result, 0),
                                                       size)));

        push_to_sequence (call_fusage);
        emit_insn (gen_rtx (USE, VOIDmode, reg));
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     register tree exp;
     int post, ignore;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  if (output_bytecode)
    {
      bc_expand_expr (exp);
      return NULL_RTX;
    }

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
          && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
              || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
        SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
        op0 = copy_to_reg (op0);
    }
  else if (GET_CODE (op0) == SUBREG
           && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
         post-incrementing, get a copy of the old value.  Otherwise,
         just mark that we cannot increment in place.  */
      if (post)
        op0 = copy_to_reg (op0);
      else
        bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
                 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_operand_predicate[icode][0]) (op0, mode)
          && (*insn_operand_predicate[icode][1]) (op0, mode)
          && (*insn_operand_predicate[icode][2]) (op1, mode))
        single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
         Problems with multiple evaluation of INCREMENTED are prevented
         because either (1) it is a component_ref or preincrement,
         in which case it was stabilized above, or (2) it is an array_ref
         with constant index in an array in a register, which is
         safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
                             || TREE_CODE (exp) == PREDECREMENT_EXPR)
                            ? MINUS_EXPR : PLUS_EXPR),
                           TREE_TYPE (exp),
                           incremented,
                           TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
             || TREE_CODE (incremented) == CONVERT_EXPR)
        {
          newexp = convert (TREE_TYPE (incremented), newexp);
          incremented = TREE_OPERAND (incremented, 0);
        }

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
         If there is an insn to add or subtract in this mode, queue it.
         Queueing the increment insn avoids the register shuffling
         that often results if we must increment now and first save
         the old value for subsequent use.  */

#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_operand_predicate[icode][0]) (op0, mode)
          && (*insn_operand_predicate[icode][1]) (op0, mode))
        {
          if (! (*insn_operand_predicate[icode][2]) (op1, mode))
            op1 = force_reg (mode, op1);

          return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
        }
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
        {
          rtx addr = force_reg (Pmode, XEXP (op0, 0));
          rtx temp, result;

          op0 = change_address (op0, VOIDmode, addr);
          temp = force_reg (GET_MODE (op0), op0);
          if (! (*insn_operand_predicate[icode][2]) (op1, mode))
            op1 = force_reg (mode, op1);

          /* The increment queue is LIFO, thus we have to `queue'
             the instructions in reverse order.  */
          enqueue_insn (op0, gen_move_insn (op0, temp));
          result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
          return result;
        }
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
                      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
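
/* Illustrative sketch only (not part of GNU CC) of the POST flag's
   meaning: both forms store the new value back into the lvalue, but they
   yield different results, which is why expand_increment must sometimes
   copy the old value first:  */
#if 0
int example (int i)
{
  int a = i++;	/* post = 1: yields the old value of i */
  int b = ++i;	/* post = 0: yields the incremented value */
  return a + b;
}
#endif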
/* Expand all function calls contained within EXP, innermost ones first.
   But don't look within expressions that have sequence points.
   For each CALL_EXPR, record the rtx for its value
   in the CALL_EXPR_RTL field.  */

static void
preexpand_calls (exp)
     tree exp;
{
  register int nops, i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));

  if (! do_preexpand_calls)
    return;

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0
          /* Do nothing if the call returns a variable-sized object.  */
          || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          /* Do nothing to built-in functions.  */
          || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
              && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                  == FUNCTION_DECL)
              && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
        return;

      CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
         the adjust will be done for it (since it makes jumps).
         Do it now, so that if this is inside an argument
         of a function, we don't get the stack adjustment
         after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return;
    }

  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
        type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
        if (type == 'e' || type == '<' || type == '1' || type == '2'
            || type == 'r')
          preexpand_calls (TREE_OPERAND (exp, i));
      }
}
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  rtx comparison = 0;
  int i;
  tree type;
  enum machine_mode mode;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;
#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
                                   TREE_OPERAND (exp, 0),
                                   TREE_OPERAND (exp, 1)),
                            NE, NE);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferal ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferal ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferal ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferal ();
      break;
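
      /* Illustrative sketch only (not part of GNU CC): for `if (a && b)'
         the TRUTH_ANDIF_EXPR case above emits, in effect,

             if (!a) goto false_label;   -- do_jump on operand 0
             if (!b) goto false_label;   -- do_jump on operand 1
             ...true arm...

         so operand 1 is never evaluated when operand 0 is false, and
         TRUTH_ORIF_EXPR is the mirror image with the true label.  */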
    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
        int bitsize, bitpos, unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;
        int alignment;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep,
                             &alignment);

        type = type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }
    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          register rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();

          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

          start_cleanup_deferal ();
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);

          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          end_cleanup_deferal ();
        }
      break;
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
                 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          do_jump
            (fold
             (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                     fold (build (EQ_EXPR, TREE_TYPE (exp),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))),
                     fold (build (EQ_EXPR, TREE_TYPE (exp),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))))),
             if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          comparison = compare (exp, EQ, EQ);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
                 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          do_jump
            (fold
             (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                     fold (build (NE_EXPR, TREE_TYPE (exp),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (REALPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))),
                     fold (build (NE_EXPR, TREE_TYPE (exp),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 0))),
                                  fold (build1 (IMAGPART_EXPR,
                                                TREE_TYPE (inner_type),
                                                TREE_OPERAND (exp, 1))))))),
             if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          comparison = compare (exp, NE, NE);
        break;
      }
    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        comparison = compare (exp, GE, GEU);
      break;
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
        comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
        comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && !can_compare_p (GET_MODE (temp)))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
                                       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                       GET_MODE (temp), NULL_RTX, 0);
      else
        abort ();
    }
10145 /* Do any postincrements in the expression that was tested. */
10148 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10149 straight into a conditional jump instruction as the jump condition.
10150 Otherwise, all the work has been done already. */
10152 if (comparison
== const_true_rtx
)
10155 emit_jump (if_true_label
);
10157 else if (comparison
== const0_rtx
)
10159 if (if_false_label
)
10160 emit_jump (if_false_label
);
10162 else if (comparison
)
10163 do_jump_for_compare (comparison
, if_false_label
, if_true_label
);
10165 if (drop_through_label
)
10167 /* If do_jump produces code that might be jumped around,
10168 do any stack adjusts from that code, before the place
10169 where control merges in. */
10170 do_pending_stack_adjust ();
10171 emit_label (drop_through_label
);
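/* For example, expanding the C condition `i < j' funnels through the
   code above: either a single compare/branch pair is emitted via
   compare and do_jump_for_compare, or, for a multiword integer mode
   with no compare insn, one of the do_jump_by_parts_* routines below
   takes over.  */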
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
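/* A C model of the word-at-a-time loop above, for two-word operands.
   This is a sketch under hypothetical names (wide_gt, a, b, with a[1]
   taken as the high-order word); it is kept under #if 0 and is not
   part of the compiler.  For signed operands the compiler compares
   the high-order word signed and all lower words unsigned.  */
#if 0
static int
wide_gt (unsigned long a[2], unsigned long b[2])
{
  int i;

  /* High-order word first; a lower word matters only when every
     higher word has compared equal.  */
  for (i = 1; i >= 0; i--)
    {
      if (a[i] > b[i])
	return 1;
      if (a[i] != b[i])
	return 0;
    }
  return 0;
}
#endif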
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
				   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
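/* Likewise, a C model of the multiword equality loop above, under
   hypothetical names and #if 0'd out; illustration only.  */
#if 0
static int
wide_eq (unsigned long a[2], unsigned long b[2])
{
  int i;

  /* Any unequal word decides the answer at once; the true label is
     reached only after every word has matched.  */
  for (i = 0; i < 2; i++)
    if (a[i] != b[i])
      return 0;
  return 1;
}
#endif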
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label, and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
	 case that, on some machines, emitting the branch would discard
	 the previous compare insn and emit a replacement.  This isn't
	 done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
	insn = get_insns ();
      else if (INSN_DELETED_P (prev))
	abort ();
      else
	insn = NEXT_INSN (prev);

      for (; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
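/* For instance, a call such as

       compare_from_rtx (GEN_INT (2), GEN_INT (3), LT, 0, SImode,
			 NULL_RTX, 0)

   is folded on the spot by simplify_relational_operation and returns
   const_true_rtx without emitting any compare insn.  (Illustrative
   values only.)  */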
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      HOST_WIDE_INT tem;
      int bitnum;
      int ops_unsignedp;

      tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
				 NULL_RTX, VOIDmode, 0));
      /* In this case, immed_double_const will sign extend the value to make
	 it look the same on the host and target.  We must remove the
	 sign-extension before calling exact_log2, since exact_log2 will
	 fail for negative values.  */
      if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
	  && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
	/* We don't use the obvious constant shift to generate the mask,
	   because that generates compiler warnings when BITS_PER_WORD is
	   greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
	   code is unreachable in that case.  */
	tem = tem & GET_MODE_MASK (word_mode);
      bitnum = exact_log2 (tem);

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
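  /* C model of the single-bit trick above: testing (x & (1 << n)) != 0
     reduces to a shift and a mask, with an extra xor for the EQ sense.
     A sketch under hypothetical names, not compiler code.  */
#if 0
static unsigned int
bit_test_ne (unsigned int x, int n)
{
  return (x >> n) & 1;		/* for EQ, additionally ^ 1 */
}
#endif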
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
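/* E.g. `flag = (x != y);' is expanded through emit_store_flag above
   when the target has a suitable scc pattern; when it has not, the
   set/compare/jump/set sequence just emitted stands in for it.  */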
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));
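  /* For example, a switch over case values 5..10 reaches this point
     with INDEX already reduced by 5, so the single unsigned test
     (unsigned) (i - 5) > 5 rejects both i < 5 (the subtraction wraps
     around to a huge unsigned value) and i > 10.  (Illustrative
     bounds.)  */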
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx (PLUS, Pmode,
		   gen_rtx (MULT, Pmode, index,
			    GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
		   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */
/* Emit a suitable bytecode to load a value from memory, assuming a pointer
   to that value is on the top of the stack.  The resulting type is TYPE, and
   the source declaration is DECL.  */

void
bc_load_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  /* Bit fields are special.  We only know about signed and
     unsigned ints, and enums.  The latter are treated as
     signed integers.  */

  if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
	|| TREE_CODE (type) == INTEGER_TYPE)
      opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
    else
      abort ();
  else
    /* See corresponding comment in bc_store_memory.  */
    if (TYPE_MODE (type) == BLKmode
	|| TYPE_MODE (type) == VOIDmode)
      return;
    else
      opcode = mode_to_load_map[(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Store the contents of the second stack slot to the address in the
   top stack slot.  DECL is the declaration of the destination and is used
   to determine whether we're dealing with a bitfield.  */

void
bc_store_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;

  if (DECL_BIT_FIELD (decl))
    {
      if (TREE_CODE (type) == ENUMERAL_TYPE
	  || TREE_CODE (type) == INTEGER_TYPE)
	opcode = sstoreBI;
      else
	abort ();
    }
  else
    if (TYPE_MODE (type) == BLKmode)
      {
	/* Copy structure.  This expands to a block copy instruction, storeBLK.
	   In addition to the arguments expected by the other store instructions,
	   it also expects a type size (SImode) on top of the stack, which is the
	   structure size in size units (usually bytes).  The two first arguments
	   are already on the stack; so we just put the size on level 1.  For some
	   other languages, the size may be variable, which is why we don't encode
	   it as a storeBLK literal, but rather treat it as a full-fledged expression.  */

	bc_expand_expr (TYPE_SIZE (type));
	opcode = storeBLK;
      }
    else
      opcode = mode_to_store_map[(int) TYPE_MODE (type)];

  if (opcode == neverneverland)
    abort ();

  bc_emit_bytecode (opcode);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Allocate local stack space sufficient to hold a value of the given
   SIZE at alignment boundary ALIGNMENT bits.  ALIGNMENT must be an
   integral power of 2.  A special case is locals of type VOID, which
   have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
   remapped into the corresponding attribute of SI.  */

rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
  int byte_alignment;

  if (size < 0)
    abort ();

  /* Normalize size and alignment  */
  if (!size)
    size = UNITS_PER_WORD;

  if (alignment < BITS_PER_UNIT)
    byte_alignment = 1 << (INT_ALIGN - 1);
  else
    /* Align  */
    byte_alignment = alignment / BITS_PER_UNIT;

  if (local_vars_size & (byte_alignment - 1))
    local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}
/* Allocate variable-sized local array.  Variable-sized arrays are
   actually pointers to the address in memory where they are stored.  */

rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));

  /* Align pointer  */
  if (local_vars_size & ptralign)
    local_vars_size += ptralign - (local_vars_size & ptralign);

  /* Note down local space needed: pointer to block; also return
     dummy rtx  */

  retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;

  return retval;
}
/* Push the machine address for the given external variable offset.  */

void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
  bc_emit_bytecode (constP);
  bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
			 BYTECODE_BC_LABEL (externaddr)->offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Like above, but expects an IDENTIFIER.  */

void
bc_load_externaddr_id (id, offset)
     tree id;
     int offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();

  bc_emit_bytecode (constP);
  bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);

#ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Push the machine address for the given local variable offset.  */

void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}


/* Push the machine address for the given parameter offset.
   NOTE: offset is in bits.  */

void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
			      / BITS_PER_UNIT));
}
/* Convert a[i] into *(a + i).  */

tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
  tree type = TREE_TYPE (exp);
  tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
			   TREE_OPERAND (exp, 0));
  tree index = TREE_OPERAND (exp, 1);

  /* Convert the integer argument to a type the same size as a pointer
     so the multiply won't overflow spuriously.  */

  if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
    index = convert (type_for_size (POINTER_SIZE, 0), index);

  /* The array address isn't volatile even if the array is.
     (Of course this isn't terribly relevant since the bytecode
     translator treats nearly everything as volatile anyway.)  */
  TREE_THIS_VOLATILE (array_adr) = 0;

  return build1 (INDIRECT_REF, type,
		 fold (build (PLUS_EXPR,
			      TYPE_POINTER_TO (type),
			      array_adr,
			      fold (build (MULT_EXPR,
					   TYPE_POINTER_TO (type),
					   index,
					   size_in_bytes (type))))));
}
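/* Source-level picture of the rewrite above, for the hypothetical
   declarations `int a[10]; int i;':

       a[i]  ==>  *(int *) ((char *) a + i * sizeof (int))

   with the index first converted to the precision of a pointer so
   the multiplication cannot overflow spuriously.  */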
/* Load the address of the component referenced by the given
   COMPONENT_REF expression.

   Returns innermost lvalue.  */

tree
bc_expand_component_address (exp)
     tree exp;
{
  tree tem;
  enum machine_mode mode;
  int bitpos = 0;
  HOST_WIDE_INT SIval;

  tem = TREE_OPERAND (exp, 1);
  mode = DECL_MODE (tem);

  /* Compute cumulative bit offset for nested component refs
     and array refs, and find the ultimate containing object.  */

  for (tem = exp;; tem = TREE_OPERAND (tem, 0))
    {
      if (TREE_CODE (tem) == COMPONENT_REF)
	bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
      else
	if (TREE_CODE (tem) == ARRAY_REF
	    && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	    && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
	  bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
		     * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
		     /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
	else
	  break;
    }

  bc_expand_expr (tem);

  /* For bitfields also push their offset and size  */
  if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
    bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
  else
    if (SIval = bitpos / BITS_PER_UNIT)
      bc_emit_instruction (addconstPSI, SIval);

  return (TREE_OPERAND (exp, 1));
}
/* Emit code to push two SI constants  */

void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
  bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}
/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard  */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);

  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:

      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:

      return (bc_expand_component_address (exp));

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
	 TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
	 also make sure we have an operand, just in case...  */

      if (TREE_OPERAND (exp, 0)
	  && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size  */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
				 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:

      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			     BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:

      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer  */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size  */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case RESULT_DECL:

      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:

      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
      else if (DECL_EXTERNAL (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer  */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size  */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case STRING_CST:
      {
	rtx r;

	bc_emit_bytecode (constP);
	r = output_constant_def (exp);
	bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
	fputc ('\n', stderr);
#endif
      }
      break;

    default:

      abort ();
      break;
    }

  /* Most lvalues don't have components.  */
  return (exp);
}
/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode
   plus the minimal alignment shifted left 8 bits.  */

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case RECORD_TYPE:

      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    case ERROR_MARK:

      val = 0;
      break;

    default:

      abort ();
    }
  return build_int_2 (val, 0);
}
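/* For example, a 32-bit int aligned to 32 bits would encode as
   (int) SImode | (32 << 8): the machine mode sits in the low byte and
   TYPE_ALIGN (in bits) occupies the bits above it.  (Illustrative
   values; the exact mode numbering is machine-generated.)  */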
/* Generate constructor label  */

char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}
/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;

  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in proper segment and push pointer to space on stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      text_section ();

      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      data_section ();

      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }

  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;

  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* constructor type is structure  */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
	  != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
	{
	  bc_emit_instruction (duplicate);
	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}

      /* Store each element of the constructor into the corresponding
	 field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
	  mode = DECL_MODE (field);
	  unsignedp = TREE_UNSIGNED (field);

	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

	  bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			  /* The alignment of TARGET is
			     at least what its type requires.  */
			  VOIDmode, 0,
			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			  int_size_in_bytes (TREE_TYPE (constr)));
	}
    }
  else
    /* Constructor type is array  */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
	register tree elt;
	register int i;
	tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
	int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
	int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
	tree elttype = TREE_TYPE (TREE_TYPE (constr));

	/* If the constructor has fewer elements than the array,
	   clear the whole array first.  */

	if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
	  {
	    bc_emit_instruction (duplicate);
	    bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	    bc_emit_instruction (clearBLK);
	  }

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */

	for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
	     elt;
	     elt = TREE_CHAIN (elt), i++)
	  {
	    register enum machine_mode mode;
	    int bitsize;
	    int bitpos;
	    int unsignedp;

	    mode = TYPE_MODE (elttype);
	    bitsize = GET_MODE_BITSIZE (mode);
	    unsignedp = TREE_UNSIGNED (elttype);

	    bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
		      /* * TYPE_SIZE_UNIT (elttype) */ );

	    bc_store_field (elt, bitsize, bitpos, mode,
			    TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			    /* The alignment of TARGET is
			       at least what its type requires.  */
			    VOIDmode, 0,
			    TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (constr)));
	  }
      }
}
/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
		value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer  */
  bc_expand_expr (exp);
  bc_emit_instruction (over);

  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    {
      bc_store_bit_field (bitpos, bitsize, unsignedp);
      return;
    }
  else
    /* Not bit field  */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member  */
      if (offset)
	bc_emit_instruction (addconstPSI, offset);

      /* Store  */
      bc_store_memory (type, field);
    }
}
/* Store SI/SU in bitfield  */

void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size  */
  bc_push_offset_and_size (offset, size);

  /* Store  */
  bc_emit_instruction (sstoreBI);
}


/* Load SI/SU from bitfield  */

void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size  */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend  */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
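/* Stack protocol for the two helpers above (a sketch):

     store:  ... value ptr  =>  constSI offset; constSI size; sstoreBI
     load:   ... ptr        =>  constSI offset; constSI size;
				zxloadBI or sxloadBI

   i.e. both first push the bit offset and bit size and then issue
   the appropriate bitfield opcode.  */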
/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);

    case 1:
      bc_emit_instruction (drop);
      break;

    default:

      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();