/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
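
/* For illustration: CEIL rounds an integer division up rather than down,
   so CEIL (10, 4) is 3 while 10 / 4 is 2.  It only makes sense for
   positive operands.  convert_move below uses it to count the words an
   object occupies:

     int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);  */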
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;
static rtx enqueue_insn		PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue		PROTO((void));
static void move_by_pieces	PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int, int));
void bc_expand_increment	PROTO((struct increment_operator *, tree));
rtx bc_allocate_local		PROTO((int, int));
void bc_store_memory		PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address		PROTO((tree));
void bc_expand_constructor	PROTO((tree));
void bc_adjust_stack		PROTO((int));
tree bc_canonicalize_array_ref	PROTO((tree));
void bc_load_memory		PROTO((tree, tree));
void bc_load_externaddr		PROTO((rtx));
void bc_load_externaddr_id	PROTO((tree, int));
void bc_load_localaddr		PROTO((rtx));
void bc_load_parmaddr		PROTO((rtx));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to	PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
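
/* A sketch of the intended calling pattern for this queue machinery
   (the operands here are hypothetical): a caller expanding an expression
   whose operands may carry queued increments protects each operand,
   emits its own insns, and finally flushes the queue:

     src = protect_from_queue (src, 0);   <- 0: read access
     dst = protect_from_queue (dst, 1);   <- 1: write access
     emit_move_insn (dst, src);
     emit_queue ();                       <- now emit the queued increments

   As documented above, a value returned by protect_from_queue must not
   be held across a queue flush.  */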
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
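
/* A sketch of a typical call (the registers and modes here are
   hypothetical):

     rtx byte = gen_reg_rtx (QImode);
     rtx word = gen_reg_rtx (SImode);
     convert_move (word, byte, 1);

   extends BYTE into WORD with zero-extension; passing 0 for UNSIGNEDP
   would sign-extend instead, since EQUIV_CODE above becomes SIGN_EXTEND
   rather than ZERO_EXTEND.  */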
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
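
/* Worked example for the CONST_INT handling above, assuming 32-bit
   HOST_WIDE_INT and an 8-bit QImode: converting (const_int -1) from
   QImode to SImode first masks with ((HOST_WIDE_INT) 1 << 8) - 1,
   giving 255.  With UNSIGNEDP nonzero the result is (const_int 255);
   with UNSIGNEDP zero the bit (1 << 7) is set, so the value is
   sign-extended back to (const_int -1).  */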
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
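
/* Worked example, assuming MOVE_MAX is 4, full alignment, and QImode,
   HImode and SImode move patterns are all available: for L = 11 bytes
   the loop counts 11 / 4 = 2 SImode moves (3 bytes left), then
   3 / 2 = 1 HImode move (1 byte left), then 1 QImode move, for a total
   of 4 insns.  emit_block_move compares this count against MOVE_RATIO.  */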
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }
}
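
/* A sketch of a typical call (DST and SRC are hypothetical BLKmode MEMs):

     emit_block_move (dst, src, GEN_INT (16), UNITS_PER_WORD);

   copies a 16-byte, word-aligned block.  With a constant size this
   small, the move_by_pieces path is normally chosen; otherwise a
   movstrM pattern is tried and the memcpy/bcopy libcall is the
   fallback.  */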
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
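
/* The PARALLEL expected above pairs each target register with a
   CONST_INT byte offset into Y; for example (illustrative only):

     (parallel [(expr_list (reg:SI 3) (const_int 0))
                (expr_list (reg:SI 4) (const_int 4))])

   loads bytes 0-3 of Y into register 3 and bytes 4-7 into register 4.
   A null first operand in element 0 means the value also lives partly
   on the stack.  */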
/* Emit code to move a block Y to a block X, where Y is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_store (x, y)
     rtx x, y;
{
  rtx source_reg, target;
  int i;

  if (GET_CODE (y) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (y, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (y, 0); i++)
    {
      rtx element = XVECEXP (y, 0, i);

      source_reg = XEXP (element, 0);

      if (GET_CODE (x) == MEM)
	target = change_address (x, GET_MODE (source_reg),
				 plus_constant (XEXP (x, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  target = x;
	  if (GET_MODE (target) != GET_MODE (source_reg))
	    target = gen_lowpart (GET_MODE (source_reg), target);
	}
      else
	abort ();

      emit_move_insn (target, source_reg);
    }
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (regs, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (regs, 0); i++)
    use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
}
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
2008 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2009 with move instructions for mode MODE. GENFUN is the gen_... function
2010 to make a move insn for that mode. DATA has all the other info. */
2012 static void
2013 clear_by_pieces_1 (genfun, mode, data)
2014 rtx (*genfun) ();
2015 enum machine_mode mode;
2016 struct clear_by_pieces *data;
2018 register int size = GET_MODE_SIZE (mode);
2019 register rtx to1;
2021 while (data->len >= size)
2023 if (data->reverse) data->offset -= size;
2025 to1 = (data->autinc_to
2026 ? gen_rtx (MEM, mode, data->to_addr)
2027 : change_address (data->to, mode,
2028 plus_constant (data->to_addr, data->offset)));
2029 MEM_IN_STRUCT_P (to1) = data->to_struct;
2031 #ifdef HAVE_PRE_DECREMENT
2032 if (data->explicit_inc_to < 0)
2033 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2034 #endif
2036 emit_insn ((*genfun) (to1, const0_rtx));
2037 #ifdef HAVE_POST_INCREMENT
2038 if (data->explicit_inc_to > 0)
2039 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2040 #endif
2042 if (! data->reverse) data->offset += size;
2044 data->len -= size;
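/* Illustrative aside, not part of the GCC sources: the explicit_inc_to
   protocol above mirrors the two classic pointer-walking idioms.
   With explicit_inc_to > 0 each iteration behaves like

     *p = 0; p += size;        adjust after the store (POST_INC)

   and with explicit_inc_to < 0, clearing from the top down, like

     p -= size; *p = 0;        adjust before the store (PRE_DEC)

   On machines whose addressing modes perform the increment for free,
   autinc_to is set instead and no explicit add insn is emitted.  */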
2048 /* Write zeros through the storage of OBJECT.
2049 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2050 the maximum alignment we can assume, measured in bytes. */
2052 void
2053 clear_storage (object, size, align)
2054 rtx object;
2055 rtx size;
2056 int align;
2058 if (GET_MODE (object) == BLKmode)
2060 object = protect_from_queue (object, 1);
2061 size = protect_from_queue (size, 0);
2063 if (GET_CODE (size) == CONST_INT
2064 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2065 clear_by_pieces (object, INTVAL (size), align);
2067 else
2069 /* Try the most limited insn first, because there's no point
2070 including more than one in the machine description unless
2071 the more limited one has some advantage. */
2073 rtx opalign = GEN_INT (align);
2074 enum machine_mode mode;
2076 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2077 mode = GET_MODE_WIDER_MODE (mode))
2079 enum insn_code code = clrstr_optab[(int) mode];
2081 if (code != CODE_FOR_nothing
2082 /* We don't need MODE to be narrower than
2083 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2084 the mode mask, as it is returned by the macro, it will
2085 definitely be less than the actual mode mask. */
2086 && ((GET_CODE (size) == CONST_INT
2087 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2088 <= GET_MODE_MASK (mode)))
2089 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2090 && (insn_operand_predicate[(int) code][0] == 0
2091 || (*insn_operand_predicate[(int) code][0]) (object,
2092 BLKmode))
2093 && (insn_operand_predicate[(int) code][2] == 0
2094 || (*insn_operand_predicate[(int) code][2]) (opalign,
2095 VOIDmode)))
2097 rtx op1;
2098 rtx last = get_last_insn ();
2099 rtx pat;
2101 op1 = convert_to_mode (mode, size, 1);
2102 if (insn_operand_predicate[(int) code][1] != 0
2103 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2104 mode))
2105 op1 = copy_to_mode_reg (mode, op1);
2107 pat = GEN_FCN ((int) code) (object, op1, opalign);
2108 if (pat)
2110 emit_insn (pat);
2111 return;
2113 else
2114 delete_insns_since (last);
2119 #ifdef TARGET_MEM_FUNCTIONS
2120 emit_library_call (memset_libfunc, 0,
2121 VOIDmode, 3,
2122 XEXP (object, 0), Pmode,
2123 const0_rtx, TYPE_MODE (integer_type_node),
2124 convert_to_mode (TYPE_MODE (sizetype),
2125 size, TREE_UNSIGNED (sizetype)),
2126 TYPE_MODE (sizetype));
2127 #else
2128 emit_library_call (bzero_libfunc, 0,
2129 VOIDmode, 2,
2130 XEXP (object, 0), Pmode,
2131 convert_to_mode (TYPE_MODE (integer_type_node),
2132 size,
2133 TREE_UNSIGNED (integer_type_node)),
2134 TYPE_MODE (integer_type_node));
2135 #endif
2138 else
2139 emit_move_insn (object, const0_rtx);
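/* Illustrative aside, not part of the GCC sources: the cascade in
   clear_storage is roughly the choice one would make by hand, with
   the machine-specific clrstrM patterns as a middle case that plain
   C cannot express.  A sketch, assuming <string.h> and a hypothetical
   size threshold:

     static void
     clear_object (p, nbytes)
          char *p;
          unsigned int nbytes;
     {
       if (nbytes <= 16)
         while (nbytes--)        <- the clear_by_pieces case
           *p++ = 0;
       else
         memset (p, 0, nbytes);  <- the library fallback
     }

   The clrstrM patterns are tried narrowest mode first because a
   pattern that only accepts short lengths is presumably in the
   machine description for an advantage on exactly those lengths.  */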
2142 /* Generate code to copy Y into X.
2143 Both Y and X must have the same mode, except that
2144 Y can be a constant with VOIDmode.
2145 This mode cannot be BLKmode; use emit_block_move for that.
2147 Return the last instruction emitted. */
2150 emit_move_insn (x, y)
2151 rtx x, y;
2153 enum machine_mode mode = GET_MODE (x);
2155 x = protect_from_queue (x, 1);
2156 y = protect_from_queue (y, 0);
2158 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2159 abort ();
2161 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2162 y = force_const_mem (mode, y);
2164 /* If X or Y are memory references, verify that their addresses are valid
2165 for the machine. */
2166 if (GET_CODE (x) == MEM
2167 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2168 && ! push_operand (x, GET_MODE (x)))
2169 || (flag_force_addr
2170 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2171 x = change_address (x, VOIDmode, XEXP (x, 0));
2173 if (GET_CODE (y) == MEM
2174 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2175 || (flag_force_addr
2176 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2177 y = change_address (y, VOIDmode, XEXP (y, 0));
2179 if (mode == BLKmode)
2180 abort ();
2182 return emit_move_insn_1 (x, y);
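/* Illustrative aside, not part of the GCC sources: the force_const_mem
   call above is why, on many RISC machines, an assignment such as

     double d = 3.14159;

   compiles to a load from the constant pool rather than a move with
   an immediate operand: a CONST_DOUBLE commonly fails
   LEGITIMATE_CONSTANT_P, so the constant is spilled to memory and the
   move reads it back.  */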
2185 /* Low level part of emit_move_insn.
2186 Called just like emit_move_insn, but assumes X and Y
2187 are basically valid. */
2190 emit_move_insn_1 (x, y)
2191 rtx x, y;
2193 enum machine_mode mode = GET_MODE (x);
2194 enum machine_mode submode;
2195 enum mode_class class = GET_MODE_CLASS (mode);
2196 int i;
2198 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2199 return
2200 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2202 /* Expand complex moves by moving real part and imag part, if possible. */
2203 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2204 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2205 * BITS_PER_UNIT),
2206 (class == MODE_COMPLEX_INT
2207 ? MODE_INT : MODE_FLOAT),
2209 && (mov_optab->handlers[(int) submode].insn_code
2210 != CODE_FOR_nothing))
2212 /* Don't split destination if it is a stack push. */
2213 int stack = push_operand (x, GET_MODE (x));
2214 rtx insns;
2216 /* If this is a stack push, push the highpart first, so it
2217 will be in the argument order.
2219 In that case, change_address is used only to convert
2220 the mode, not to change the address. */
2221 if (stack)
2223 /* Note that the real part always precedes the imag part in memory
2224 regardless of machine's endianness. */
2225 #ifdef STACK_GROWS_DOWNWARD
2226 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2227 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2228 gen_imagpart (submode, y)));
2229 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2230 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2231 gen_realpart (submode, y)));
2232 #else
2233 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2234 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2235 gen_realpart (submode, y)));
2236 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2237 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2238 gen_imagpart (submode, y)));
2239 #endif
2241 else
2243 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2244 (gen_realpart (submode, x), gen_realpart (submode, y)));
2245 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2246 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2249 return get_last_insn ();
2252 /* This will handle any multi-word mode that lacks a move_insn pattern.
2253 However, you will get better code if you define such patterns,
2254 even if they must turn into multiple assembler instructions. */
2255 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2257 rtx last_insn = 0;
2258 rtx insns;
2260 #ifdef PUSH_ROUNDING
2262 /* If X is a push on the stack, do the push now and replace
2263 X with a reference to the stack pointer. */
2264 if (push_operand (x, GET_MODE (x)))
2266 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2267 x = change_address (x, VOIDmode, stack_pointer_rtx);
2269 #endif
2271 /* Show the output dies here. */
2272 if (x != y)
2273 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
2275 for (i = 0;
2276 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2277 i++)
2279 rtx xpart = operand_subword (x, i, 1, mode);
2280 rtx ypart = operand_subword (y, i, 1, mode);
2282 /* If we can't get a part of Y, put Y into memory if it is a
2283 constant. Otherwise, force it into a register. If we still
2284 can't get a part of Y, abort. */
2285 if (ypart == 0 && CONSTANT_P (y))
2287 y = force_const_mem (mode, y);
2288 ypart = operand_subword (y, i, 1, mode);
2290 else if (ypart == 0)
2291 ypart = operand_subword_force (y, i, mode);
2293 if (xpart == 0 || ypart == 0)
2294 abort ();
2296 last_insn = emit_move_insn (xpart, ypart);
2299 return last_insn;
2301 else
2302 abort ();
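/* Illustrative aside, not part of the GCC sources: the multi-word
   fallback above is just a word-at-a-time copy.  On a hypothetical
   machine where `long' is the word size it behaves like

     static void
     move_words (dst, src, nwords)
          long *dst, *src;
          int nwords;
     {
       int i;
       for (i = 0; i < nwords; i++)
         dst[i] = src[i];
     }

   with nwords equal to GET_MODE_SIZE (mode) / UNITS_PER_WORD rounded
   up.  The CLOBBER emitted before the loop tells flow that the whole
   destination dies at once, so the not-yet-written words of a
   partially assigned register are not treated as live.  */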
2305 /* Pushing data onto the stack. */
2307 /* Push a block of length SIZE (perhaps variable)
2308 and return an rtx to address the beginning of the block.
2309 Note that it is not possible for the value returned to be a QUEUED.
2310 The value may be virtual_outgoing_args_rtx.
2312 EXTRA is the number of bytes of padding to push in addition to SIZE.
2313 BELOW nonzero means this padding comes at low addresses;
2314 otherwise, the padding comes at high addresses. */
2317 push_block (size, extra, below)
2318 rtx size;
2319 int extra, below;
2321 register rtx temp;
2323 size = convert_modes (Pmode, ptr_mode, size, 1);
2324 if (CONSTANT_P (size))
2325 anti_adjust_stack (plus_constant (size, extra));
2326 else if (GET_CODE (size) == REG && extra == 0)
2327 anti_adjust_stack (size);
2328 else
2330 rtx temp = copy_to_mode_reg (Pmode, size);
2331 if (extra != 0)
2332 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2333 temp, 0, OPTAB_LIB_WIDEN);
2334 anti_adjust_stack (temp);
2337 #ifdef STACK_GROWS_DOWNWARD
2338 temp = virtual_outgoing_args_rtx;
2339 if (extra != 0 && below)
2340 temp = plus_constant (temp, extra);
2341 #else
2342 if (GET_CODE (size) == CONST_INT)
2343 temp = plus_constant (virtual_outgoing_args_rtx,
2344 - INTVAL (size) - (below ? 0 : extra));
2345 else if (extra != 0 && !below)
2346 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2347 negate_rtx (Pmode, plus_constant (size, extra)));
2348 else
2349 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2350 negate_rtx (Pmode, size));
2351 #endif
2353 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
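/* Illustrative aside, not part of the GCC sources: a worked example
   of the address arithmetic above.  On a downward-growing stack with
   SIZE = 24, EXTRA = 8 and BELOW nonzero, anti_adjust_stack moves the
   stack pointer down by 32 and the returned address is
   virtual_outgoing_args_rtx plus 8, so the 8 padding bytes lie below
   the 24-byte block.  With BELOW zero the block itself starts at
   virtual_outgoing_args_rtx and the padding ends up above it.  */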
2357 gen_push_operand ()
2359 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2362 /* Generate code to push X onto the stack, assuming it has mode MODE and
2363 type TYPE.
2364 MODE is redundant except when X is a CONST_INT (since they don't
2365 carry mode info).
2366 SIZE is an rtx for the size of data to be copied (in bytes),
2367 needed only if X is BLKmode.
2369 ALIGN (in bytes) is the maximum alignment we can assume.
2371 If PARTIAL and REG are both nonzero, then copy that many of the first
2372 words of X into registers starting with REG, and push the rest of X.
2373 The amount of space pushed is decreased by PARTIAL words,
2374 rounded *down* to a multiple of PARM_BOUNDARY.
2375 REG must be a hard register in this case.
2376 If REG is zero but PARTIAL is not, take all other actions for an
2377 argument partially in registers, but do not actually load any
2378 registers.
2380 EXTRA is the amount in bytes of extra space to leave next to this arg.
2381 This is ignored if an argument block has already been allocated.
2383 On a machine that lacks real push insns, ARGS_ADDR is the address of
2384 the bottom of the argument block for this call. We use indexing off there
2385 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2386 argument block has not been preallocated.
2388 ARGS_SO_FAR is the size of args previously pushed for this call. */
2390 void
2391 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2392 args_addr, args_so_far)
2393 register rtx x;
2394 enum machine_mode mode;
2395 tree type;
2396 rtx size;
2397 int align;
2398 int partial;
2399 rtx reg;
2400 int extra;
2401 rtx args_addr;
2402 rtx args_so_far;
2404 rtx xinner;
2405 enum direction stack_direction
2406 #ifdef STACK_GROWS_DOWNWARD
2407 = downward;
2408 #else
2409 = upward;
2410 #endif
2412 /* Decide where to pad the argument: `downward' for below,
2413 `upward' for above, or `none' for don't pad it.
2414 Default is below for small data on big-endian machines; else above. */
2415 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2417 /* If we're placing part of X into a register and part of X onto
2418 the stack, indicate that the entire register is clobbered to
2419 keep flow from thinking the unused part of the register is live. */
2420 if (partial > 0 && reg != 0)
2421 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2423 /* Invert direction if stack is post-update. */
2424 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2425 if (where_pad != none)
2426 where_pad = (where_pad == downward ? upward : downward);
2428 xinner = x = protect_from_queue (x, 0);
2430 if (mode == BLKmode)
2432 /* Copy a block into the stack, entirely or partially. */
2434 register rtx temp;
2435 int used = partial * UNITS_PER_WORD;
2436 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2437 int skip;
2439 if (size == 0)
2440 abort ();
2442 used -= offset;
2444 /* USED is now the # of bytes we need not copy to the stack
2445 because registers will take care of them. */
2447 if (partial != 0)
2448 xinner = change_address (xinner, BLKmode,
2449 plus_constant (XEXP (xinner, 0), used));
2451 /* If the partial register-part of the arg counts in its stack size,
2452 skip the part of stack space corresponding to the registers.
2453 Otherwise, start copying to the beginning of the stack space,
2454 by setting SKIP to 0. */
2455 #ifndef REG_PARM_STACK_SPACE
2456 skip = 0;
2457 #else
2458 skip = used;
2459 #endif
2461 #ifdef PUSH_ROUNDING
2462 /* Do it with several push insns if that doesn't take lots of insns
2463 and if there is no difficulty with push insns that skip bytes
2464 on the stack for alignment purposes. */
2465 if (args_addr == 0
2466 && GET_CODE (size) == CONST_INT
2467 && skip == 0
2468 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2469 < MOVE_RATIO)
2470 /* Here we avoid the case of a structure whose weak alignment
2471 forces many pushes of a small amount of data,
2472 and such small pushes do rounding that causes trouble. */
2473 && ((! SLOW_UNALIGNED_ACCESS)
2474 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2475 || PUSH_ROUNDING (align) == align)
2476 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2478 /* Push padding now if padding above and stack grows down,
2479 or if padding below and stack grows up.
2480 But if space already allocated, this has already been done. */
2481 if (extra && args_addr == 0
2482 && where_pad != none && where_pad != stack_direction)
2483 anti_adjust_stack (GEN_INT (extra));
2485 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2486 INTVAL (size) - used, align);
2488 else
2489 #endif /* PUSH_ROUNDING */
2491 /* Otherwise make space on the stack and copy the data
2492 to the address of that space. */
2494 /* Deduct words put into registers from the size we must copy. */
2495 if (partial != 0)
2497 if (GET_CODE (size) == CONST_INT)
2498 size = GEN_INT (INTVAL (size) - used);
2499 else
2500 size = expand_binop (GET_MODE (size), sub_optab, size,
2501 GEN_INT (used), NULL_RTX, 0,
2502 OPTAB_LIB_WIDEN);
2505 /* Get the address of the stack space.
2506 In this case, we do not deal with EXTRA separately.
2507 A single stack adjust will do. */
2508 if (! args_addr)
2510 temp = push_block (size, extra, where_pad == downward);
2511 extra = 0;
2513 else if (GET_CODE (args_so_far) == CONST_INT)
2514 temp = memory_address (BLKmode,
2515 plus_constant (args_addr,
2516 skip + INTVAL (args_so_far)));
2517 else
2518 temp = memory_address (BLKmode,
2519 plus_constant (gen_rtx (PLUS, Pmode,
2520 args_addr, args_so_far),
2521 skip));
2523 /* TEMP is the address of the block. Copy the data there. */
2524 if (GET_CODE (size) == CONST_INT
2525 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2526 < MOVE_RATIO))
2528 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2529 INTVAL (size), align);
2530 goto ret;
2532 /* Try the most limited insn first, because there's no point
2533 including more than one in the machine description unless
2534 the more limited one has some advantage. */
2535 #ifdef HAVE_movstrqi
2536 if (HAVE_movstrqi
2537 && GET_CODE (size) == CONST_INT
2538 && ((unsigned) INTVAL (size)
2539 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2541 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2542 xinner, size, GEN_INT (align));
2543 if (pat != 0)
2545 emit_insn (pat);
2546 goto ret;
2549 #endif
2550 #ifdef HAVE_movstrhi
2551 if (HAVE_movstrhi
2552 && GET_CODE (size) == CONST_INT
2553 && ((unsigned) INTVAL (size)
2554 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2556 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2557 xinner, size, GEN_INT (align));
2558 if (pat != 0)
2560 emit_insn (pat);
2561 goto ret;
2564 #endif
2565 #ifdef HAVE_movstrsi
2566 if (HAVE_movstrsi)
2568 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2569 xinner, size, GEN_INT (align));
2570 if (pat != 0)
2572 emit_insn (pat);
2573 goto ret;
2576 #endif
2577 #ifdef HAVE_movstrdi
2578 if (HAVE_movstrdi)
2580 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2581 xinner, size, GEN_INT (align));
2582 if (pat != 0)
2584 emit_insn (pat);
2585 goto ret;
2588 #endif
2590 #ifndef ACCUMULATE_OUTGOING_ARGS
2591 /* If the source is referenced relative to the stack pointer,
2592 copy it to another register to stabilize it. We do not need
2593 to do this if we know that we won't be changing sp. */
2595 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2596 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2597 temp = copy_to_reg (temp);
2598 #endif
2600 /* Make inhibit_defer_pop nonzero around the library call
2601 to force it to pop the bcopy-arguments right away. */
2602 NO_DEFER_POP;
2603 #ifdef TARGET_MEM_FUNCTIONS
2604 emit_library_call (memcpy_libfunc, 0,
2605 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2606 convert_to_mode (TYPE_MODE (sizetype),
2607 size, TREE_UNSIGNED (sizetype)),
2608 TYPE_MODE (sizetype));
2609 #else
2610 emit_library_call (bcopy_libfunc, 0,
2611 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2612 convert_to_mode (TYPE_MODE (integer_type_node),
2613 size,
2614 TREE_UNSIGNED (integer_type_node)),
2615 TYPE_MODE (integer_type_node));
2616 #endif
2617 OK_DEFER_POP;
2620 else if (partial > 0)
2622 /* Scalar partly in registers. */
2624 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2625 int i;
2626 int not_stack;
2627 /* # words of start of argument
2628 that we must make space for but need not store. */
2629 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2630 int args_offset = INTVAL (args_so_far);
2631 int skip;
2633 /* Push padding now if padding above and stack grows down,
2634 or if padding below and stack grows up.
2635 But if space already allocated, this has already been done. */
2636 if (extra && args_addr == 0
2637 && where_pad != none && where_pad != stack_direction)
2638 anti_adjust_stack (GEN_INT (extra));
2640 /* If we make space by pushing it, we might as well push
2641 the real data. Otherwise, we can leave OFFSET nonzero
2642 and leave the space uninitialized. */
2643 if (args_addr == 0)
2644 offset = 0;
2646 /* Now NOT_STACK gets the number of words that we don't need to
2647 allocate on the stack. */
2648 not_stack = partial - offset;
2650 /* If the partial register-part of the arg counts in its stack size,
2651 skip the part of stack space corresponding to the registers.
2652 Otherwise, start copying to the beginning of the stack space,
2653 by setting SKIP to 0. */
2654 #ifndef REG_PARM_STACK_SPACE
2655 skip = 0;
2656 #else
2657 skip = not_stack;
2658 #endif
2660 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2661 x = validize_mem (force_const_mem (mode, x));
2663 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2664 SUBREGs of such registers are not allowed. */
2665 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2666 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2667 x = copy_to_reg (x);
2669 /* Loop over all the words allocated on the stack for this arg. */
2670 /* We can do it by words, because any scalar bigger than a word
2671 has a size a multiple of a word. */
2672 #ifndef PUSH_ARGS_REVERSED
2673 for (i = not_stack; i < size; i++)
2674 #else
2675 for (i = size - 1; i >= not_stack; i--)
2676 #endif
2677 if (i >= not_stack + offset)
2678 emit_push_insn (operand_subword_force (x, i, mode),
2679 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2680 0, args_addr,
2681 GEN_INT (args_offset + ((i - not_stack + skip)
2682 * UNITS_PER_WORD)));
2684 else
2686 rtx addr;
2688 /* Push padding now if padding above and stack grows down,
2689 or if padding below and stack grows up.
2690 But if space already allocated, this has already been done. */
2691 if (extra && args_addr == 0
2692 && where_pad != none && where_pad != stack_direction)
2693 anti_adjust_stack (GEN_INT (extra));
2695 #ifdef PUSH_ROUNDING
2696 if (args_addr == 0)
2697 addr = gen_push_operand ();
2698 else
2699 #endif
2700 if (GET_CODE (args_so_far) == CONST_INT)
2701 addr
2702 = memory_address (mode,
2703 plus_constant (args_addr, INTVAL (args_so_far)));
2704 else
2705 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2706 args_so_far));
2708 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2711 ret:
2712 /* If part should go in registers, copy that part
2713 into the appropriate registers. Do this now, at the end,
2714 since mem-to-mem copies above may do function calls. */
2715 if (partial > 0 && reg != 0)
2717 /* Handle calls that pass values in multiple non-contiguous locations.
2718 The Irix 6 ABI has examples of this. */
2719 if (GET_CODE (reg) == PARALLEL)
2720 emit_group_load (reg, x);
2721 else
2722 move_block_to_reg (REGNO (reg), x, partial, mode);
2725 if (extra && args_addr == 0 && where_pad == stack_direction)
2726 anti_adjust_stack (GEN_INT (extra));
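/* Illustrative aside, not part of the GCC sources: a worked example
   of the PARTIAL case above.  Suppose a 16-byte BLKmode argument is
   passed with PARTIAL = 2 on a machine with 4-byte words, so USED = 8.
   The first 8 bytes travel in registers, loaded by the
   emit_group_load / move_block_to_reg calls after `ret'; XINNER is
   advanced past them, and only the remaining 8 bytes are pushed or
   copied into the argument block.  If REG_PARM_STACK_SPACE is
   defined, SKIP = 8 additionally leaves a stack hole where the
   register words would have gone.  */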
2729 /* Expand an assignment that stores the value of FROM into TO.
2730 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2731 (This may contain a QUEUED rtx;
2732 if the value is constant, this rtx is a constant.)
2733 Otherwise, the returned value is NULL_RTX.
2735 SUGGEST_REG is no longer actually used.
2736 It used to mean, copy the value through a register
2737 and return that register, if that is possible.
2738 We now use WANT_VALUE to decide whether to do this. */
2741 expand_assignment (to, from, want_value, suggest_reg)
2742 tree to, from;
2743 int want_value;
2744 int suggest_reg;
2746 register rtx to_rtx = 0;
2747 rtx result;
2749 /* Don't crash if the lhs of the assignment was erroneous. */
2751 if (TREE_CODE (to) == ERROR_MARK)
2753 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2754 return want_value ? result : NULL_RTX;
2757 if (output_bytecode)
2759 tree dest_innermost;
2761 bc_expand_expr (from);
2762 bc_emit_instruction (duplicate);
2764 dest_innermost = bc_expand_address (to);
2766 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2767 take care of it here. */
2769 bc_store_memory (TREE_TYPE (to), dest_innermost);
2770 return NULL;
2773 /* Assignment of a structure component needs special treatment
2774 if the structure component's rtx is not simply a MEM.
2775 Assignment of an array element at a constant index, and assignment of
2776 an array element in an unaligned packed structure field, has the same
2777 problem. */
2779 if (TREE_CODE (to) == COMPONENT_REF
2780 || TREE_CODE (to) == BIT_FIELD_REF
2781 || (TREE_CODE (to) == ARRAY_REF
2782 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2783 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2784 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2786 enum machine_mode mode1;
2787 int bitsize;
2788 int bitpos;
2789 tree offset;
2790 int unsignedp;
2791 int volatilep = 0;
2792 tree tem;
2793 int alignment;
2795 push_temp_slots ();
2796 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2797 &unsignedp, &volatilep, &alignment);
2799 /* If we are going to use store_bit_field and extract_bit_field,
2800 make sure to_rtx will be safe for multiple use. */
2802 if (mode1 == VOIDmode && want_value)
2803 tem = stabilize_reference (tem);
2805 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2806 if (offset != 0)
2808 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2810 if (GET_CODE (to_rtx) != MEM)
2811 abort ();
2812 to_rtx = change_address (to_rtx, VOIDmode,
2813 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2814 force_reg (ptr_mode, offset_rtx)));
2816 if (volatilep)
2818 if (GET_CODE (to_rtx) == MEM)
2820 /* When the offset is zero, to_rtx is the address of the
2821 structure we are storing into, and hence may be shared.
2822 We must make a new MEM before setting the volatile bit. */
2823 if (offset == 0)
2824 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2825 MEM_VOLATILE_P (to_rtx) = 1;
2827 #if 0 /* This was turned off because, when a field is volatile
2828 in an object which is not volatile, the object may be in a register,
2829 and then we would abort over here. */
2830 else
2831 abort ();
2832 #endif
2835 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2836 (want_value
2837 /* Spurious cast makes HPUX compiler happy. */
2838 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2839 : VOIDmode),
2840 unsignedp,
2841 /* Required alignment of containing datum. */
2842 alignment,
2843 int_size_in_bytes (TREE_TYPE (tem)));
2844 preserve_temp_slots (result);
2845 free_temp_slots ();
2846 pop_temp_slots ();
2848 /* If the value is meaningful, convert RESULT to the proper mode.
2849 Otherwise, return nothing. */
2850 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2851 TYPE_MODE (TREE_TYPE (from)),
2852 result,
2853 TREE_UNSIGNED (TREE_TYPE (to)))
2854 : NULL_RTX);
2857 /* If the rhs is a function call and its value is not an aggregate,
2858 call the function before we start to compute the lhs.
2859 This is needed for correct code for cases such as
2860 val = setjmp (buf) on machines where reference to val
2861 requires loading up part of an address in a separate insn.
2863 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2864 a promoted variable where the zero- or sign-extension needs to be done.
2865 Handling this in the normal way is safe because no computation is done
2866 before the call. */
2867 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2868 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2869 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2871 rtx value;
2873 push_temp_slots ();
2874 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2875 if (to_rtx == 0)
2876 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2878 /* Handle calls that return values in multiple non-contiguous locations.
2879 The Irix 6 ABI has examples of this. */
2880 if (GET_CODE (to_rtx) == PARALLEL)
2881 emit_group_load (to_rtx, value);
2882 else if (GET_MODE (to_rtx) == BLKmode)
2883 emit_block_move (to_rtx, value, expr_size (from),
2884 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2885 else
2886 emit_move_insn (to_rtx, value);
2887 preserve_temp_slots (to_rtx);
2888 free_temp_slots ();
2889 pop_temp_slots ();
2890 return want_value ? to_rtx : NULL_RTX;
2893 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2894 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2896 if (to_rtx == 0)
2897 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2899 /* Don't move directly into a return register. */
2900 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2902 rtx temp;
2904 push_temp_slots ();
2905 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2906 emit_move_insn (to_rtx, temp);
2907 preserve_temp_slots (to_rtx);
2908 free_temp_slots ();
2909 pop_temp_slots ();
2910 return want_value ? to_rtx : NULL_RTX;
2913 /* In case we are returning the contents of an object which overlaps
2914 the place the value is being stored, use a safe function when copying
2915 a value through a pointer into a structure value return block. */
2916 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2917 && current_function_returns_struct
2918 && !current_function_returns_pcc_struct)
2920 rtx from_rtx, size;
2922 push_temp_slots ();
2923 size = expr_size (from);
2924 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2926 #ifdef TARGET_MEM_FUNCTIONS
2927 emit_library_call (memcpy_libfunc, 0,
2928 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2929 XEXP (from_rtx, 0), Pmode,
2930 convert_to_mode (TYPE_MODE (sizetype),
2931 size, TREE_UNSIGNED (sizetype)),
2932 TYPE_MODE (sizetype));
2933 #else
2934 emit_library_call (bcopy_libfunc, 0,
2935 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2936 XEXP (to_rtx, 0), Pmode,
2937 convert_to_mode (TYPE_MODE (integer_type_node),
2938 size, TREE_UNSIGNED (integer_type_node)),
2939 TYPE_MODE (integer_type_node));
2940 #endif
2942 preserve_temp_slots (to_rtx);
2943 free_temp_slots ();
2944 pop_temp_slots ();
2945 return want_value ? to_rtx : NULL_RTX;
2948 /* Compute FROM and store the value in the rtx we got. */
2950 push_temp_slots ();
2951 result = store_expr (from, to_rtx, want_value);
2952 preserve_temp_slots (result);
2953 free_temp_slots ();
2954 pop_temp_slots ();
2955 return want_value ? result : NULL_RTX;
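/* Illustrative aside, not part of the GCC sources: the
   COMPONENT_REF / BIT_FIELD_REF path above is what handles a store
   such as

     struct s { unsigned int a : 3, b : 5; } v;
     v.b = x;

   With a typical little-endian layout get_inner_reference reports
   the containing object V with bitpos = 3, bitsize = 5 and mode1 =
   VOIDmode, and store_field then emits a read-modify-write sequence,
   since no MEM can address a 5-bit field directly.  */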
2958 /* Generate code for computing expression EXP,
2959 and storing the value into TARGET.
2960 TARGET may contain a QUEUED rtx.
2962 If WANT_VALUE is nonzero, return a copy of the value
2963 not in TARGET, so that we can be sure to use the proper
2964 value in a containing expression even if TARGET has something
2965 else stored in it. If possible, we copy the value through a pseudo
2966 and return that pseudo. Or, if the value is constant, we try to
2967 return the constant. In some cases, we return a pseudo
2968 copied *from* TARGET.
2970 If the mode is BLKmode then we may return TARGET itself.
2971 It turns out that in BLKmode it doesn't cause a problem,
2972 because C has no operators that could combine two different
2973 assignments into the same BLKmode object with different values
2974 with no sequence point. Will other languages need this to
2975 be more thorough?
2977 If WANT_VALUE is 0, we return NULL, to make sure
2978 to catch quickly any cases where the caller uses the value
2979 and fails to set WANT_VALUE. */
2982 store_expr (exp, target, want_value)
2983 register tree exp;
2984 register rtx target;
2985 int want_value;
2987 register rtx temp;
2988 int dont_return_target = 0;
2990 if (TREE_CODE (exp) == COMPOUND_EXPR)
2992 /* Perform first part of compound expression, then assign from second
2993 part. */
2994 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2995 emit_queue ();
2996 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2998 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3000 /* For conditional expression, get safe form of the target. Then
3001 test the condition, doing the appropriate assignment on either
3002 side. This avoids the creation of unnecessary temporaries.
3003 For non-BLKmode, it is more efficient not to do this. */
3005 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3006 rtx flag = NULL_RTX;
3007 tree left_cleanups = NULL_TREE;
3008 tree right_cleanups = NULL_TREE;
3009 tree old_cleanups = cleanups_this_call;
3011 /* Used to save a pointer to the place to put the setting of
3012 the flag that indicates if this side of the conditional was
3013 taken. We backpatch the code, if we find out later that we
3014 have any conditional cleanups that need to be performed. */
3015 rtx dest_right_flag = NULL_RTX;
3016 rtx dest_left_flag = NULL_RTX;
3018 emit_queue ();
3019 target = protect_from_queue (target, 1);
3021 do_pending_stack_adjust ();
3022 NO_DEFER_POP;
3023 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3024 store_expr (TREE_OPERAND (exp, 1), target, 0);
3025 dest_left_flag = get_last_insn ();
3026 /* Handle conditional cleanups, if any. */
3027 left_cleanups = defer_cleanups_to (old_cleanups);
3028 emit_queue ();
3029 emit_jump_insn (gen_jump (lab2));
3030 emit_barrier ();
3031 emit_label (lab1);
3032 store_expr (TREE_OPERAND (exp, 2), target, 0);
3033 dest_right_flag = get_last_insn ();
3034 /* Handle conditional cleanups, if any. */
3035 right_cleanups = defer_cleanups_to (old_cleanups);
3036 emit_queue ();
3037 emit_label (lab2);
3038 OK_DEFER_POP;
3040 /* Add back in any conditional cleanups. */
3041 if (left_cleanups || right_cleanups)
3043 tree new_cleanups;
3044 tree cond;
3045 rtx last;
3047 /* Now that we know that a flag is needed, go back and add in the
3048 setting of the flag. */
3050 flag = gen_reg_rtx (word_mode);
3052 /* Do the left side flag. */
3053 last = get_last_insn ();
3054 /* Flag left cleanups as needed. */
3055 emit_move_insn (flag, const1_rtx);
3056 /* ??? deprecated, use sequences instead. */
3057 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
3059 /* Do the right side flag. */
3060 last = get_last_insn ();
3061 /* Flag right cleanups as needed. */
3062 emit_move_insn (flag, const0_rtx);
3063 /* ??? deprecated, use sequences instead. */
3064 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
3066 /* All cleanups must be on the function_obstack. */
3067 push_obstacks_nochange ();
3068 resume_temporary_allocation ();
3070 /* convert flag, which is an rtx, into a tree. */
3071 cond = make_node (RTL_EXPR);
3072 TREE_TYPE (cond) = integer_type_node;
3073 RTL_EXPR_RTL (cond) = flag;
3074 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
3075 cond = save_expr (cond);
3077 if (! left_cleanups)
3078 left_cleanups = integer_zero_node;
3079 if (! right_cleanups)
3080 right_cleanups = integer_zero_node;
3081 new_cleanups = build (COND_EXPR, void_type_node,
3082 truthvalue_conversion (cond),
3083 left_cleanups, right_cleanups);
3084 new_cleanups = fold (new_cleanups);
3086 pop_obstacks ();
3088 /* Now add in the conditionalized cleanups. */
3089 cleanups_this_call
3090 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
3091 expand_eh_region_start ();
3093 return want_value ? target : NULL_RTX;
3095 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3096 && GET_MODE (target) != BLKmode)
3097 /* If target is in memory and caller wants value in a register instead,
3098 arrange that. Pass TARGET as target for expand_expr so that,
3099 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3100 We know expand_expr will not use the target in that case.
3101 Don't do this if TARGET is volatile because we are supposed
3102 to write it and then read it. */
3104 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3105 GET_MODE (target), 0);
3106 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3107 temp = copy_to_reg (temp);
3108 dont_return_target = 1;
3110 else if (queued_subexp_p (target))
3111 /* If target contains a postincrement, let's not risk
3112 using it as the place to generate the rhs. */
3114 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3116 /* Expand EXP into a new pseudo. */
3117 temp = gen_reg_rtx (GET_MODE (target));
3118 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3120 else
3121 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3123 /* If target is volatile, ANSI requires accessing the value
3124 *from* the target, if it is accessed. So make that happen.
3125 In no case return the target itself. */
3126 if (! MEM_VOLATILE_P (target) && want_value)
3127 dont_return_target = 1;
3129 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3130 /* If this is a scalar in a register that is stored in a wider mode
3131 than the declared mode, compute the result into its declared mode
3132 and then convert to the wider mode. Our value is the computed
3133 expression. */
3135 /* If we don't want a value, we can do the conversion inside EXP,
3136 which will often result in some optimizations. Do the conversion
3137 in two steps: first change the signedness, if needed, then
3138 the extend. But don't do this if the type of EXP is a subtype
3139 of something else since then the conversion might involve
3140 more than just converting modes. */
3141 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3142 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3144 if (TREE_UNSIGNED (TREE_TYPE (exp))
3145 != SUBREG_PROMOTED_UNSIGNED_P (target))
3147 = convert
3148 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3149 TREE_TYPE (exp)),
3150 exp);
3152 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3153 SUBREG_PROMOTED_UNSIGNED_P (target)),
3154 exp);
3157 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3159 /* If TEMP is a volatile MEM and we want a result value, make
3160 the access now so it gets done only once. Likewise if
3161 it contains TARGET. */
3162 if (GET_CODE (temp) == MEM && want_value
3163 && (MEM_VOLATILE_P (temp)
3164 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3165 temp = copy_to_reg (temp);
3167 /* If TEMP is a VOIDmode constant, use convert_modes to make
3168 sure that we properly convert it. */
3169 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3170 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3171 TYPE_MODE (TREE_TYPE (exp)), temp,
3172 SUBREG_PROMOTED_UNSIGNED_P (target));
3174 convert_move (SUBREG_REG (target), temp,
3175 SUBREG_PROMOTED_UNSIGNED_P (target));
3176 return want_value ? temp : NULL_RTX;
3178 else
3180 temp = expand_expr (exp, target, GET_MODE (target), 0);
3181 /* Return TARGET if it's a specified hardware register.
3182 If TARGET is a volatile mem ref, either return TARGET
3183 or return a reg copied *from* TARGET; ANSI requires this.
3185 Otherwise, if TEMP is not TARGET, return TEMP
3186 if it is constant (for efficiency),
3187 or if we really want the correct value. */
3188 if (!(target && GET_CODE (target) == REG
3189 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3190 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3191 && temp != target
3192 && (CONSTANT_P (temp) || want_value))
3193 dont_return_target = 1;
3196 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3197 the same as that of TARGET, adjust the constant. This is needed, for
3198 example, in case it is a CONST_DOUBLE and we want only a word-sized
3199 value. */
3200 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3201 && TREE_CODE (exp) != ERROR_MARK
3202 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3203 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3204 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3206 /* If value was not generated in the target, store it there.
3207 Convert the value to TARGET's type first if necessary. */
3209 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3211 target = protect_from_queue (target, 1);
3212 if (GET_MODE (temp) != GET_MODE (target)
3213 && GET_MODE (temp) != VOIDmode)
3215 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3216 if (dont_return_target)
3218 /* In this case, we will return TEMP,
3219 so make sure it has the proper mode.
3220 But don't forget to store the value into TARGET. */
3221 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3222 emit_move_insn (target, temp);
3224 else
3225 convert_move (target, temp, unsignedp);
3228 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3230 /* Handle copying a string constant into an array.
3231 The string constant may be shorter than the array.
3232 So copy just the string's actual length, and clear the rest. */
3233 rtx size;
3234 rtx addr;
3236 /* Get the size of the data type of the string,
3237 which is actually the size of the target. */
3238 size = expr_size (exp);
3239 if (GET_CODE (size) == CONST_INT
3240 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3241 emit_block_move (target, temp, size,
3242 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3243 else
3245 /* Compute the size of the data to copy from the string. */
3246 tree copy_size
3247 = size_binop (MIN_EXPR,
3248 make_tree (sizetype, size),
3249 convert (sizetype,
3250 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3251 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3252 VOIDmode, 0);
3253 rtx label = 0;
3255 /* Copy that much. */
3256 emit_block_move (target, temp, copy_size_rtx,
3257 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3259 /* Figure out how much is left in TARGET that we have to clear.
3260 Do all calculations in ptr_mode. */
3262 addr = XEXP (target, 0);
3263 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3265 if (GET_CODE (copy_size_rtx) == CONST_INT)
3267 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3268 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3270 else
3272 addr = force_reg (ptr_mode, addr);
3273 addr = expand_binop (ptr_mode, add_optab, addr,
3274 copy_size_rtx, NULL_RTX, 0,
3275 OPTAB_LIB_WIDEN);
3277 size = expand_binop (ptr_mode, sub_optab, size,
3278 copy_size_rtx, NULL_RTX, 0,
3279 OPTAB_LIB_WIDEN);
3281 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3282 GET_MODE (size), 0, 0);
3283 label = gen_label_rtx ();
3284 emit_jump_insn (gen_blt (label));
3287 if (size != const0_rtx)
3289 #ifdef TARGET_MEM_FUNCTIONS
3290 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3291 addr, ptr_mode,
3292 const0_rtx, TYPE_MODE (integer_type_node),
3293 convert_to_mode (TYPE_MODE (sizetype),
3294 size,
3295 TREE_UNSIGNED (sizetype)),
3296 TYPE_MODE (sizetype));
3297 #else
3298 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3299 addr, ptr_mode,
3300 convert_to_mode (TYPE_MODE (integer_type_node),
3301 size,
3302 TREE_UNSIGNED (integer_type_node)),
3303 TYPE_MODE (integer_type_node));
3304 #endif
3307 if (label)
3308 emit_label (label);
3311 /* Handle calls that return values in multiple non-contiguous locations.
3312 The Irix 6 ABI has examples of this. */
3313 else if (GET_CODE (target) == PARALLEL)
3314 emit_group_load (target, temp);
3315 else if (GET_MODE (temp) == BLKmode)
3316 emit_block_move (target, temp, expr_size (exp),
3317 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3318 else
3319 emit_move_insn (target, temp);
3322 /* If we don't want a value, return NULL_RTX. */
3323 if (! want_value)
3324 return NULL_RTX;
3326 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3327 ??? The latter test doesn't seem to make sense. */
3328 else if (dont_return_target && GET_CODE (temp) != MEM)
3329 return temp;
3331 /* Return TARGET itself if it is a hard register. */
3332 else if (want_value && GET_MODE (target) != BLKmode
3333 && ! (GET_CODE (target) == REG
3334 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3335 return copy_to_reg (target);
3337 else
3338 return target;
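/* Illustrative aside, not part of the GCC sources: a worked example
   of the STRING_CST branch above.  For

     char buf[8] = "hi";

   expr_size yields 8 (the size of the target) while
   TREE_STRING_LENGTH is 3 ("hi" plus the terminating null), so
   emit_block_move copies 3 bytes and the memset/bzero call clears
   the remaining 5, matching the C rule that the rest of such an
   array is zero-initialized.  */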
3341 /* Return 1 if EXP just contains zeros. */
3343 static int
3344 is_zeros_p (exp)
3345 tree exp;
3347 tree elt;
3349 switch (TREE_CODE (exp))
3351 case CONVERT_EXPR:
3352 case NOP_EXPR:
3353 case NON_LVALUE_EXPR:
3354 return is_zeros_p (TREE_OPERAND (exp, 0));
3356 case INTEGER_CST:
3357 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3359 case COMPLEX_CST:
3360 return
3361 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3363 case REAL_CST:
3364 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
3366 case CONSTRUCTOR:
3367 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3368 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3369 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3370 if (! is_zeros_p (TREE_VALUE (elt)))
3371 return 0;
3373 return 1;
3376 return 0;
3379 /* Return 1 if EXP contains mostly (3/4) zeros. */
3381 static int
3382 mostly_zeros_p (exp)
3383 tree exp;
3385 if (TREE_CODE (exp) == CONSTRUCTOR)
3387 int elts = 0, zeros = 0;
3388 tree elt = CONSTRUCTOR_ELTS (exp);
3389 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3391 /* If there are no ranges of true bits, it is all zero. */
3392 return elt == NULL_TREE;
3394 for (; elt; elt = TREE_CHAIN (elt))
3396 /* We do not handle the case where the index is a RANGE_EXPR,
3397 so the statistic will be somewhat inaccurate.
3398 We do make a more accurate count in store_constructor itself,
3399 and since this function is only used for nested array elements,
3400 this should be close enough. */
3401 if (mostly_zeros_p (TREE_VALUE (elt)))
3402 zeros++;
3403 elts++;
3406 return 4 * zeros >= 3 * elts;
3409 return is_zeros_p (exp);
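/* Illustrative aside, not part of the GCC sources: the test
   4 * zeros >= 3 * elts above is an integer form of
   zeros / elts >= 3/4.  For the initializer { 0, 0, 0, 5 } it sees
   zeros = 3, elts = 4, and 12 >= 12 holds, so the constructor counts
   as mostly zero and the containing object is cleared first; for
   { 0, 0, 5, 5 } the test is 8 >= 12 and fails, so the elements are
   stored individually.  */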
3412 /* Helper function for store_constructor.
3413 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3414 TYPE is the type of the CONSTRUCTOR, not the element type.
3415 CLEARED is as for store_constructor.
3417 This provides a recursive shortcut back to store_constructor when it isn't
3418 necessary to go through store_field. This is so that we can pass through
3419 the cleared field to let store_constructor know that we may not have to
3420 clear a substructure if the outer structure has already been cleared. */
3422 static void
3423 store_constructor_field (target, bitsize, bitpos,
3424 mode, exp, type, cleared)
3425 rtx target;
3426 int bitsize, bitpos;
3427 enum machine_mode mode;
3428 tree exp, type;
3429 int cleared;
3431 if (TREE_CODE (exp) == CONSTRUCTOR
3432 && bitpos % BITS_PER_UNIT == 0
3433 /* If we have a non-zero bitpos for a register target, then we just
3434 let store_field do the bitfield handling. This is unlikely to
3435 generate unnecessary clear instructions anyway. */
3436 && (bitpos == 0 || GET_CODE (target) == MEM))
3438 if (bitpos != 0)
3439 target = change_address (target, VOIDmode,
3440 plus_constant (XEXP (target, 0),
3441 bitpos / BITS_PER_UNIT));
3442 store_constructor (exp, target, cleared);
3444 else
3445 store_field (target, bitsize, bitpos, mode, exp,
3446 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3447 int_size_in_bytes (type));
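/* Illustrative aside, not part of the GCC sources: the shortcut above
   matters for nested aggregates such as

     struct point { int x, y; };
     struct box { struct point lo, hi; } b = { { 0, 5 } };

   The outer constructor lists fewer fields than the type has, so the
   whole of B is cleared first; the recursive store_constructor call
   for { 0, 5 } then sees CLEARED set, so it can skip the store of the
   zero member and store only the 5.  Had the inner constructor itself
   been mostly zeros, CLEARED would also let it skip a second
   clear_storage call.  */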
3450 /* Store the value of constructor EXP into the rtx TARGET.
3451 TARGET is either a REG or a MEM.
3452 CLEARED is true if TARGET is known to have been zero'd. */
3454 static void
3455 store_constructor (exp, target, cleared)
3456 tree exp;
3457 rtx target;
3458 int cleared;
3460 tree type = TREE_TYPE (exp);
3462 /* We know our target cannot conflict, since safe_from_p has been called. */
3463 #if 0
3464 /* Don't try copying piece by piece into a hard register
3465 since that is vulnerable to being clobbered by EXP.
3466 Instead, construct in a pseudo register and then copy it all. */
3467 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3469 rtx temp = gen_reg_rtx (GET_MODE (target));
3470 store_constructor (exp, temp, 0);
3471 emit_move_insn (target, temp);
3472 return;
3474 #endif
3476 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3477 || TREE_CODE (type) == QUAL_UNION_TYPE)
3479 register tree elt;
3481 /* Inform later passes that the whole union value is dead. */
3482 if (TREE_CODE (type) == UNION_TYPE
3483 || TREE_CODE (type) == QUAL_UNION_TYPE)
3484 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3486 /* If we are building a static constructor into a register,
3487 set the initial value as zero so we can fold the value into
3488 a constant. But if more than one register is involved,
3489 this probably loses. */
3490 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3491 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3493 if (! cleared)
3494 emit_move_insn (target, const0_rtx);
3496 cleared = 1;
3499 /* If the constructor has fewer fields than the structure
3500 or if we are initializing the structure to mostly zeros,
3501 clear the whole structure first. */
3502 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3503 != list_length (TYPE_FIELDS (type)))
3504 || mostly_zeros_p (exp))
3506 if (! cleared)
3507 clear_storage (target, expr_size (exp),
3508 TYPE_ALIGN (type) / BITS_PER_UNIT);
3510 cleared = 1;
3512 else
3513 /* Inform later passes that the old value is dead. */
3514 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3516 /* Store each element of the constructor into
3517 the corresponding field of TARGET. */
3519 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3521 register tree field = TREE_PURPOSE (elt);
3522 register enum machine_mode mode;
3523 int bitsize;
3524 int bitpos = 0;
3525 int unsignedp;
3526 tree pos, constant = 0, offset = 0;
3527 rtx to_rtx = target;
3529 /* Just ignore missing fields.
3530 We cleared the whole structure, above,
3531 if any fields are missing. */
3532 if (field == 0)
3533 continue;
3535 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3536 continue;
3538 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3539 unsignedp = TREE_UNSIGNED (field);
3540 mode = DECL_MODE (field);
3541 if (DECL_BIT_FIELD (field))
3542 mode = VOIDmode;
3544 pos = DECL_FIELD_BITPOS (field);
3545 if (TREE_CODE (pos) == INTEGER_CST)
3546 constant = pos;
3547 else if (TREE_CODE (pos) == PLUS_EXPR
3548 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3549 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3550 else
3551 offset = pos;
3553 if (constant)
3554 bitpos = TREE_INT_CST_LOW (constant);
3556 if (offset)
3558 rtx offset_rtx;
3560 if (contains_placeholder_p (offset))
3561 offset = build (WITH_RECORD_EXPR, sizetype,
3562 offset, exp);
3564 offset = size_binop (FLOOR_DIV_EXPR, offset,
3565 size_int (BITS_PER_UNIT));
3567 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3568 if (GET_CODE (to_rtx) != MEM)
3569 abort ();
3571 to_rtx
3572 = change_address (to_rtx, VOIDmode,
3573 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3574 force_reg (ptr_mode, offset_rtx)));
3576 if (TREE_READONLY (field))
3578 if (GET_CODE (to_rtx) == MEM)
3579 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3580 XEXP (to_rtx, 0));
3581 RTX_UNCHANGING_P (to_rtx) = 1;
3584 store_constructor_field (to_rtx, bitsize, bitpos,
3585 mode, TREE_VALUE (elt), type, cleared);
3588 else if (TREE_CODE (type) == ARRAY_TYPE)
3590 register tree elt;
3591 register int i;
3592 int need_to_clear;
3593 tree domain = TYPE_DOMAIN (type);
3594 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3595 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3596 tree elttype = TREE_TYPE (type);
3598 /* If the constructor has fewer elements than the array,
3599 clear the whole array first. Similarly if this is a
3600 static constructor of a non-BLKmode object. */
3601 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3602 need_to_clear = 1;
3603 else
3605 HOST_WIDE_INT count = 0, zero_count = 0;
3606 need_to_clear = 0;
3607 /* This loop is a more accurate version of the loop in
3608 mostly_zeros_p (it handles RANGE_EXPR in an index).
3609 It is also needed to check for missing elements. */
3610 for (elt = CONSTRUCTOR_ELTS (exp);
3611 elt != NULL_TREE;
3612 elt = TREE_CHAIN (elt))
3614 tree index = TREE_PURPOSE (elt);
3615 HOST_WIDE_INT this_node_count;
3616 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3618 tree lo_index = TREE_OPERAND (index, 0);
3619 tree hi_index = TREE_OPERAND (index, 1);
3620 if (TREE_CODE (lo_index) != INTEGER_CST
3621 || TREE_CODE (hi_index) != INTEGER_CST)
3623 need_to_clear = 1;
3624 break;
3626 this_node_count = TREE_INT_CST_LOW (hi_index)
3627 - TREE_INT_CST_LOW (lo_index) + 1;
3629 else
3630 this_node_count = 1;
3631 count += this_node_count;
3632 if (mostly_zeros_p (TREE_VALUE (elt)))
3633 zero_count += this_node_count;
3635 /* Clear the entire array first if there are any missing elements,
3636 or if the incidence of zero elements is >= 75%. */
3637 if (count < maxelt - minelt + 1
3638 || 4 * zero_count >= 3 * count)
3639 need_to_clear = 1;
3641 if (need_to_clear)
3643 if (! cleared)
3644 clear_storage (target, expr_size (exp),
3645 TYPE_ALIGN (type) / BITS_PER_UNIT);
3646 cleared = 1;
3648 else
3649 /* Inform later passes that the old value is dead. */
3650 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3652 /* Store each element of the constructor into
3653 the corresponding element of TARGET, determined
3654 by counting the elements. */
3655 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3656 elt;
3657 elt = TREE_CHAIN (elt), i++)
3659 register enum machine_mode mode;
3660 int bitsize;
3661 int bitpos;
3662 int unsignedp;
3663 tree value = TREE_VALUE (elt);
3664 tree index = TREE_PURPOSE (elt);
3665 rtx xtarget = target;
3667 if (cleared && is_zeros_p (value))
3668 continue;
3670 mode = TYPE_MODE (elttype);
3671 bitsize = GET_MODE_BITSIZE (mode);
3672 unsignedp = TREE_UNSIGNED (elttype);
3674 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3676 tree lo_index = TREE_OPERAND (index, 0);
3677 tree hi_index = TREE_OPERAND (index, 1);
3678 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3679 struct nesting *loop;
3680 HOST_WIDE_INT lo, hi, count;
3681 tree position;
3683 /* If the range is constant and "small", unroll the loop. */
3684 if (TREE_CODE (lo_index) == INTEGER_CST
3685 && TREE_CODE (hi_index) == INTEGER_CST
3686 && (lo = TREE_INT_CST_LOW (lo_index),
3687 hi = TREE_INT_CST_LOW (hi_index),
3688 count = hi - lo + 1,
3689 (GET_CODE (target) != MEM
3690 || count <= 2
3691 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3692 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3693 <= 40 * 8))))
3695 lo -= minelt; hi -= minelt;
3696 for (; lo <= hi; lo++)
3698 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3699 store_constructor_field (target, bitsize, bitpos,
3700 mode, value, type, cleared);
3703 else
3705 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3706 loop_top = gen_label_rtx ();
3707 loop_end = gen_label_rtx ();
3709 unsignedp = TREE_UNSIGNED (domain);
3711 index = build_decl (VAR_DECL, NULL_TREE, domain);
3713 DECL_RTL (index) = index_r
3714 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3715 &unsignedp, 0));
3717 if (TREE_CODE (value) == SAVE_EXPR
3718 && SAVE_EXPR_RTL (value) == 0)
3720 /* Make sure value gets expanded once before the
3721 loop. */
3722 expand_expr (value, const0_rtx, VOIDmode, 0);
3723 emit_queue ();
3725 store_expr (lo_index, index_r, 0);
3726 loop = expand_start_loop (0);
3728 /* Assign value to element index. */
3729 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3730 size_int (BITS_PER_UNIT));
3731 position = size_binop (MULT_EXPR,
3732 size_binop (MINUS_EXPR, index,
3733 TYPE_MIN_VALUE (domain)),
3734 position);
3735 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3736 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3737 xtarget = change_address (target, mode, addr);
3738 if (TREE_CODE (value) == CONSTRUCTOR)
3739 store_constructor (value, xtarget, cleared);
3740 else
3741 store_expr (value, xtarget, 0);
3743 expand_exit_loop_if_false (loop,
3744 build (LT_EXPR, integer_type_node,
3745 index, hi_index));
3747 expand_increment (build (PREINCREMENT_EXPR,
3748 TREE_TYPE (index),
3749 index, integer_one_node), 0, 0);
3750 expand_end_loop ();
3751 emit_label (loop_end);
3753 /* Needed by stupid register allocation, to extend the
3754 lifetime of pseudo-regs used by target past the end
3755 of the loop. */
3756 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3759 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3760 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3762 rtx pos_rtx, addr;
3763 tree position;
3765 if (index == 0)
3766 index = size_int (i);
3768 if (minelt)
3769 index = size_binop (MINUS_EXPR, index,
3770 TYPE_MIN_VALUE (domain));
3771 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3772 size_int (BITS_PER_UNIT));
3773 position = size_binop (MULT_EXPR, index, position);
3774 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3775 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3776 xtarget = change_address (target, mode, addr);
3777 store_expr (value, xtarget, 0);
3779 else
3781 if (index != 0)
3782 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3783 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3784 else
3785 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3786 store_constructor_field (target, bitsize, bitpos,
3787 mode, value, type, cleared);
3791 /* set constructor assignments */
3792 else if (TREE_CODE (type) == SET_TYPE)
3794 tree elt = CONSTRUCTOR_ELTS (exp);
3795 rtx xtarget = XEXP (target, 0);
3796 int set_word_size = TYPE_ALIGN (type);
3797 int nbytes = int_size_in_bytes (type), nbits;
3798 tree domain = TYPE_DOMAIN (type);
3799 tree domain_min, domain_max, bitlength;
3801 /* The default implementation strategy is to extract the constant
3802 parts of the constructor, use that to initialize the target,
3803 and then "or" in whatever non-constant ranges we need in addition.
3805 If a large set is all zero or all ones, it is
3806 probably better to set it using memset (if available) or bzero.
3807 Also, if a large set has just a single range, it may also be
3808 better to first clear the whole set (using
3809 bzero/memset), and then set the bits we want. */
3811 /* Check for all zeros. */
3812 if (elt == NULL_TREE)
3814 if (!cleared)
3815 clear_storage (target, expr_size (exp),
3816 TYPE_ALIGN (type) / BITS_PER_UNIT);
3817 return;
3820 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3821 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3822 bitlength = size_binop (PLUS_EXPR,
3823 size_binop (MINUS_EXPR, domain_max, domain_min),
3824 size_one_node);
3826 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3827 abort ();
3828 nbits = TREE_INT_CST_LOW (bitlength);
3830 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3831 are "complicated" (more than one range), initialize (the
3832 constant parts) by copying from a constant. */
3833 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3834 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3836 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3837 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3838 char *bit_buffer = (char *) alloca (nbits);
3839 HOST_WIDE_INT word = 0;
3840 int bit_pos = 0;
3841 int ibit = 0;
3842 int offset = 0; /* In bytes from beginning of set. */
3843 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3844 for (;;)
3846 if (bit_buffer[ibit])
3848 if (BYTES_BIG_ENDIAN)
3849 word |= (1 << (set_word_size - 1 - bit_pos));
3850 else
3851 word |= 1 << bit_pos;
3853 bit_pos++; ibit++;
3854 if (bit_pos >= set_word_size || ibit == nbits)
3856 if (word != 0 || ! cleared)
3858 rtx datum = GEN_INT (word);
3859 rtx to_rtx;
3860 /* The assumption here is that it is safe to use
3861 XEXP if the set is multi-word, but not if
3862 it's single-word. */
3863 if (GET_CODE (target) == MEM)
3865 to_rtx = plus_constant (XEXP (target, 0), offset);
3866 to_rtx = change_address (target, mode, to_rtx);
3868 else if (offset == 0)
3869 to_rtx = target;
3870 else
3871 abort ();
3872 emit_move_insn (to_rtx, datum);
3874 if (ibit == nbits)
3875 break;
3876 word = 0;
3877 bit_pos = 0;
3878 offset += set_word_size / BITS_PER_UNIT;
3882 else if (!cleared)
3884 /* Don't bother clearing storage if the set is all ones. */
3885 if (TREE_CHAIN (elt) != NULL_TREE
3886 || (TREE_PURPOSE (elt) == NULL_TREE
3887 ? nbits != 1
3888 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3889 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3890 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3891 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3892 != nbits))))
3893 clear_storage (target, expr_size (exp),
3894 TYPE_ALIGN (type) / BITS_PER_UNIT);
3897 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3899 /* start of range of element or NULL */
3900 tree startbit = TREE_PURPOSE (elt);
3901 /* end of range of element, or element value */
3902 tree endbit = TREE_VALUE (elt);
3903 HOST_WIDE_INT startb, endb;
3904 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3906 bitlength_rtx = expand_expr (bitlength,
3907 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3909 /* handle non-range tuple element like [ expr ] */
3910 if (startbit == NULL_TREE)
3912 startbit = save_expr (endbit);
3913 endbit = startbit;
3915 startbit = convert (sizetype, startbit);
3916 endbit = convert (sizetype, endbit);
3917 if (! integer_zerop (domain_min))
3919 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3920 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3922 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3923 EXPAND_CONST_ADDRESS);
3924 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3925 EXPAND_CONST_ADDRESS);
3927 if (REG_P (target))
3929 targetx = assign_stack_temp (GET_MODE (target),
3930 GET_MODE_SIZE (GET_MODE (target)),
3931 0);
3932 emit_move_insn (targetx, target);
3934 else if (GET_CODE (target) == MEM)
3935 targetx = target;
3936 else
3937 abort ();
3939 #ifdef TARGET_MEM_FUNCTIONS
3940 /* Optimization: If startbit and endbit are
3941 constants divisible by BITS_PER_UNIT,
3942 call memset instead. */
3943 if (TREE_CODE (startbit) == INTEGER_CST
3944 && TREE_CODE (endbit) == INTEGER_CST
3945 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3946 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3948 emit_library_call (memset_libfunc, 0,
3949 VOIDmode, 3,
3950 plus_constant (XEXP (targetx, 0),
3951 startb / BITS_PER_UNIT),
3952 Pmode,
3953 constm1_rtx, TYPE_MODE (integer_type_node),
3954 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3955 TYPE_MODE (sizetype));
3957 else
3958 #endif
3960 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3961 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3962 bitlength_rtx, TYPE_MODE (sizetype),
3963 startbit_rtx, TYPE_MODE (sizetype),
3964 endbit_rtx, TYPE_MODE (sizetype));
3966 if (REG_P (target))
3967 emit_move_insn (target, targetx);
3971 else
3972 abort ();
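/* Illustration (hypothetical example): for a Chill- or Pascal-style
   set constructor over SET OF 0..63, constant elements such as
   [3, 8..15] are collected into the bit buffer and stored word by
   word above, while an element with runtime bounds, say [i..j],
   falls through to the loop that calls __setbits (or memset, when
   the bounds are byte-aligned constants).  */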
3975 /* Store the value of EXP (an expression tree)
3976 into a subfield of TARGET which has mode MODE and occupies
3977 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3978 If MODE is VOIDmode, it means that we are storing into a bit-field.
3980 If VALUE_MODE is VOIDmode, return nothing in particular.
3981 UNSIGNEDP is not used in this case.
3983 Otherwise, return an rtx for the value stored. This rtx
3984 has mode VALUE_MODE if that is convenient to do.
3985 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3987 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3988 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3990 static rtx
3991 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3992 unsignedp, align, total_size)
3993 rtx target;
3994 int bitsize, bitpos;
3995 enum machine_mode mode;
3996 tree exp;
3997 enum machine_mode value_mode;
3998 int unsignedp;
3999 int align;
4000 int total_size;
4002 HOST_WIDE_INT width_mask = 0;
4004 if (bitsize < HOST_BITS_PER_WIDE_INT)
4005 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4007 /* If we are storing into an unaligned field of an aligned union that is
4008 in a register, we may have the mode of TARGET being an integer mode but
4009 MODE == BLKmode. In that case, get an aligned object whose size and
4010 alignment are the same as TARGET and store TARGET into it (we can avoid
4011 the store if the field being stored is the entire width of TARGET). Then
4012 call ourselves recursively to store the field into a BLKmode version of
4013 that object. Finally, load from the object into TARGET. This is not
4014 very efficient in general, but should only be slightly more expensive
4015 than the otherwise-required unaligned accesses. Perhaps this can be
4016 cleaned up later. */
4018 if (mode == BLKmode
4019 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4021 rtx object = assign_stack_temp (GET_MODE (target),
4022 GET_MODE_SIZE (GET_MODE (target)), 0);
4023 rtx blk_object = copy_rtx (object);
4025 MEM_IN_STRUCT_P (object) = 1;
4026 MEM_IN_STRUCT_P (blk_object) = 1;
4027 PUT_MODE (blk_object, BLKmode);
4029 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4030 emit_move_insn (object, target);
4032 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4033 align, total_size);
4035 /* Even though we aren't returning target, we need to
4036 give it the updated value. */
4037 emit_move_insn (target, object);
4039 return blk_object;
4042 /* If the structure is in a register or if the component
4043 is a bit field, we cannot use addressing to access it.
4044 Use bit-field techniques or SUBREG to store in it. */
4046 if (mode == VOIDmode
4047 || (mode != BLKmode && ! direct_store[(int) mode])
4048 || GET_CODE (target) == REG
4049 || GET_CODE (target) == SUBREG
4050 /* If the field isn't aligned enough to store as an ordinary memref,
4051 store it as a bit field. */
4052 || (SLOW_UNALIGNED_ACCESS
4053 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4054 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4056 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4058 /* If BITSIZE is narrower than the size of the type of EXP
4059 we will be narrowing TEMP. Normally, what's wanted are the
4060 low-order bits. However, if EXP's type is a record and this is a
4061 big-endian machine, we want the upper BITSIZE bits. */
4062 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4063 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4064 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4065 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4066 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4067 - bitsize),
4068 temp, 1);
4070 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4071 MODE. */
4072 if (mode != VOIDmode && mode != BLKmode
4073 && mode != TYPE_MODE (TREE_TYPE (exp)))
4074 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4076 /* If the modes of TARGET and TEMP are both BLKmode, both
4077 must be in memory and BITPOS must be aligned on a byte
4078 boundary. If so, we simply do a block copy. */
4079 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4081 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4082 || bitpos % BITS_PER_UNIT != 0)
4083 abort ();
4085 target = change_address (target, VOIDmode,
4086 plus_constant (XEXP (target, 0),
4087 bitpos / BITS_PER_UNIT));
4089 emit_block_move (target, temp,
4090 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4091 / BITS_PER_UNIT),
4092 align);
4094 return value_mode == VOIDmode ? const0_rtx : target;
4097 /* Store the value in the bitfield. */
4098 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4099 if (value_mode != VOIDmode)
4101 /* The caller wants an rtx for the value. */
4102 /* If possible, avoid refetching from the bitfield itself. */
4103 if (width_mask != 0
4104 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4106 tree count;
4107 enum machine_mode tmode;
4109 if (unsignedp)
4110 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4111 tmode = GET_MODE (temp);
4112 if (tmode == VOIDmode)
4113 tmode = value_mode;
4114 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4115 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4116 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4118 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4119 NULL_RTX, value_mode, 0, align,
4120 total_size);
4122 return const0_rtx;
4124 else
4126 rtx addr = XEXP (target, 0);
4127 rtx to_rtx;
4129 /* If a value is wanted, it must be the lhs;
4130 so make the address stable for multiple use. */
4132 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4133 && ! CONSTANT_ADDRESS_P (addr)
4134 /* A frame-pointer reference is already stable. */
4135 && ! (GET_CODE (addr) == PLUS
4136 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4137 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4138 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4139 addr = copy_to_reg (addr);
4141 /* Now build a reference to just the desired component. */
4143 to_rtx = change_address (target, mode,
4144 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
4145 MEM_IN_STRUCT_P (to_rtx) = 1;
4147 return store_expr (exp, to_rtx, value_mode != VOIDmode);
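/* Example (a sketch, not from the original source): an assignment
   to a C bit-field such as

       struct s { unsigned int a : 3, b : 5; } x;
       x.b = v;

   reaches store_field with BITSIZE == 5, MODE == VOIDmode and, with
   a typical little-endian layout, BITPOS == 3, so the value is
   inserted via store_bit_field rather than an ordinary memref.  */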
4151 /* Return true if any object containing the innermost array is an unaligned
4152 packed structure field. */
4154 static int
4155 get_inner_unaligned_p (exp)
4156 tree exp;
4158 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4160 while (1)
4162 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4164 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4165 < needed_alignment)
4166 return 1;
4168 else if (TREE_CODE (exp) != ARRAY_REF
4169 && TREE_CODE (exp) != NON_LVALUE_EXPR
4170 && ! ((TREE_CODE (exp) == NOP_EXPR
4171 || TREE_CODE (exp) == CONVERT_EXPR)
4172 && (TYPE_MODE (TREE_TYPE (exp))
4173 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4174 break;
4176 exp = TREE_OPERAND (exp, 0);
4179 return 0;
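/* Example (a sketch): given

       struct p { char c; int a[4]; } __attribute__ ((packed)) x;

   a reference x.a[i] makes get_inner_unaligned_p return 1, since
   the packed structure is aligned less strictly than the int array
   type requires.  */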
4182 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4183 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4184 ARRAY_REFs and find the ultimate containing object, which we return.
4186 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4187 bit position, and *PUNSIGNEDP to the signedness of the field.
4188 If the position of the field is variable, we store a tree
4189 giving the variable offset (in units) in *POFFSET.
4190 This offset is in addition to the bit position.
4191 If the position is not variable, we store 0 in *POFFSET.
4192 We set *PALIGNMENT to the alignment in bytes of the address that will be
4193 computed. This is the alignment of the thing we return if *POFFSET
4194 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4196 If any of the extraction expressions is volatile,
4197 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4199 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4200 is a mode that can be used to access the field. In that case, *PBITSIZE
4201 is redundant.
4203 If the field describes a variable-sized object, *PMODE is set to
4204 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4205 this case, but the address of the object can be found. */
4207 tree
4208 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4209 punsignedp, pvolatilep, palignment)
4210 tree exp;
4211 int *pbitsize;
4212 int *pbitpos;
4213 tree *poffset;
4214 enum machine_mode *pmode;
4215 int *punsignedp;
4216 int *pvolatilep;
4217 int *palignment;
4219 tree orig_exp = exp;
4220 tree size_tree = 0;
4221 enum machine_mode mode = VOIDmode;
4222 tree offset = integer_zero_node;
4223 int alignment = BIGGEST_ALIGNMENT;
4225 if (TREE_CODE (exp) == COMPONENT_REF)
4227 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4228 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4229 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4230 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4232 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4234 size_tree = TREE_OPERAND (exp, 1);
4235 *punsignedp = TREE_UNSIGNED (exp);
4237 else
4239 mode = TYPE_MODE (TREE_TYPE (exp));
4240 *pbitsize = GET_MODE_BITSIZE (mode);
4241 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4244 if (size_tree)
4246 if (TREE_CODE (size_tree) != INTEGER_CST)
4247 mode = BLKmode, *pbitsize = -1;
4248 else
4249 *pbitsize = TREE_INT_CST_LOW (size_tree);
4252 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4253 and find the ultimate containing object. */
4255 *pbitpos = 0;
4257 while (1)
4259 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4261 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4262 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4263 : TREE_OPERAND (exp, 2));
4264 tree constant = integer_zero_node, var = pos;
4266 /* If this field hasn't been filled in yet, don't go
4267 past it. This should only happen when folding expressions
4268 made during type construction. */
4269 if (pos == 0)
4270 break;
4272 /* Assume here that the offset is a multiple of a unit.
4273 If not, there should be an explicitly added constant. */
4274 if (TREE_CODE (pos) == PLUS_EXPR
4275 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4276 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4277 else if (TREE_CODE (pos) == INTEGER_CST)
4278 constant = pos, var = integer_zero_node;
4280 *pbitpos += TREE_INT_CST_LOW (constant);
4281 offset = size_binop (PLUS_EXPR, offset,
4282 size_binop (EXACT_DIV_EXPR, var,
4283 size_int (BITS_PER_UNIT)));
4286 else if (TREE_CODE (exp) == ARRAY_REF)
4288 /* This code is based on the code in case ARRAY_REF in expand_expr
4289 below. We assume here that the size of an array element is
4290 always an integral multiple of BITS_PER_UNIT. */
4292 tree index = TREE_OPERAND (exp, 1);
4293 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4294 tree low_bound
4295 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4296 tree index_type = TREE_TYPE (index);
4298 if (! integer_zerop (low_bound))
4299 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4301 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4303 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4304 index);
4305 index_type = TREE_TYPE (index);
4308 index = fold (build (MULT_EXPR, index_type, index,
4309 convert (index_type,
4310 TYPE_SIZE (TREE_TYPE (exp)))));
4312 if (TREE_CODE (index) == INTEGER_CST
4313 && TREE_INT_CST_HIGH (index) == 0)
4314 *pbitpos += TREE_INT_CST_LOW (index);
4315 else
4316 offset = size_binop (PLUS_EXPR, offset,
4317 size_binop (FLOOR_DIV_EXPR, index,
4318 size_int (BITS_PER_UNIT)));
4320 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4321 && ! ((TREE_CODE (exp) == NOP_EXPR
4322 || TREE_CODE (exp) == CONVERT_EXPR)
4323 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4324 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4325 != UNION_TYPE))
4326 && (TYPE_MODE (TREE_TYPE (exp))
4327 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4328 break;
4330 /* If any reference in the chain is volatile, the effect is volatile. */
4331 if (TREE_THIS_VOLATILE (exp))
4332 *pvolatilep = 1;
4334 /* If the offset is non-constant already, then we can't assume any
4335 alignment more than the alignment here. */
4336 if (! integer_zerop (offset))
4337 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4339 exp = TREE_OPERAND (exp, 0);
4342 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4343 alignment = MIN (alignment, DECL_ALIGN (exp));
4344 else if (TREE_TYPE (exp) != 0)
4345 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4347 if (integer_zerop (offset))
4348 offset = 0;
4350 if (offset != 0 && contains_placeholder_p (offset))
4351 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4353 *pmode = mode;
4354 *poffset = offset;
4355 *palignment = alignment / BITS_PER_UNIT;
4356 return exp;
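/* Worked example (hypothetical): for

       struct s { int i; char c[10]; } v;

   a reference v.c[3] returns the VAR_DECL for v with *PBITSIZE == 8,
   *PBITPOS == 32 + 3 * 8 == 56 (assuming 32-bit int and 8-bit
   bytes), *POFFSET == 0, and *PMODE == QImode.  */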
4359 /* Given an rtx VALUE that may contain additions and multiplications,
4360 return an equivalent value that just refers to a register or memory.
4361 This is done by generating instructions to perform the arithmetic
4362 and returning a pseudo-register containing the value.
4364 The returned value may be a REG, SUBREG, MEM or constant. */
4366 rtx
4367 force_operand (value, target)
4368 rtx value, target;
4370 register optab binoptab = 0;
4371 /* Use a temporary to force order of execution of calls to
4372 `force_operand'. */
4373 rtx tmp;
4374 register rtx op2;
4375 /* Use subtarget as the target for operand 0 of a binary operation. */
4376 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4378 if (GET_CODE (value) == PLUS)
4379 binoptab = add_optab;
4380 else if (GET_CODE (value) == MINUS)
4381 binoptab = sub_optab;
4382 else if (GET_CODE (value) == MULT)
4384 op2 = XEXP (value, 1);
4385 if (!CONSTANT_P (op2)
4386 && !(GET_CODE (op2) == REG && op2 != subtarget))
4387 subtarget = 0;
4388 tmp = force_operand (XEXP (value, 0), subtarget);
4389 return expand_mult (GET_MODE (value), tmp,
4390 force_operand (op2, NULL_RTX),
4391 target, 0);
4394 if (binoptab)
4396 op2 = XEXP (value, 1);
4397 if (!CONSTANT_P (op2)
4398 && !(GET_CODE (op2) == REG && op2 != subtarget))
4399 subtarget = 0;
4400 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4402 binoptab = add_optab;
4403 op2 = negate_rtx (GET_MODE (value), op2);
4406 /* Check for an addition with OP2 a constant integer and our first
4407 operand a PLUS of a virtual register and something else. In that
4408 case, we want to emit the sum of the virtual register and the
4409 constant first and then add the other value. This allows virtual
4410 register instantiation to simply modify the constant rather than
4411 creating another one around this addition. */
4412 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4413 && GET_CODE (XEXP (value, 0)) == PLUS
4414 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4415 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4416 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4418 rtx temp = expand_binop (GET_MODE (value), binoptab,
4419 XEXP (XEXP (value, 0), 0), op2,
4420 subtarget, 0, OPTAB_LIB_WIDEN);
4421 return expand_binop (GET_MODE (value), binoptab, temp,
4422 force_operand (XEXP (XEXP (value, 0), 1), 0),
4423 target, 0, OPTAB_LIB_WIDEN);
4426 tmp = force_operand (XEXP (value, 0), subtarget);
4427 return expand_binop (GET_MODE (value), binoptab, tmp,
4428 force_operand (op2, NULL_RTX),
4429 target, 0, OPTAB_LIB_WIDEN);
4430 /* We give UNSIGNEDP = 0 to expand_binop
4431 because the only operations we are expanding here are signed ones. */
4433 return value;
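/* Usage sketch (illustrative): given a value such as

       (plus:SI (reg:SI 100) (const_int 4))

   force_operand emits the addition through expand_binop and returns
   a pseudo-register holding the sum, so callers always see a REG,
   SUBREG, MEM or constant instead of arbitrary arithmetic.  */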
4436 /* Subroutine of expand_expr:
4437 save the non-copied parts (LIST) of an expr (LHS), and return a list
4438 which can restore these values to their previous values,
4439 should something modify their storage. */
4441 static tree
4442 save_noncopied_parts (lhs, list)
4443 tree lhs;
4444 tree list;
4446 tree tail;
4447 tree parts = 0;
4449 for (tail = list; tail; tail = TREE_CHAIN (tail))
4450 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4451 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4452 else
4454 tree part = TREE_VALUE (tail);
4455 tree part_type = TREE_TYPE (part);
4456 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4457 rtx target = assign_temp (part_type, 0, 1, 1);
4458 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4459 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
4460 parts = tree_cons (to_be_saved,
4461 build (RTL_EXPR, part_type, NULL_TREE,
4462 (tree) target),
4463 parts);
4464 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4466 return parts;
4469 /* Subroutine of expand_expr:
4470 record the non-copied parts (LIST) of an expr (LHS), and return a list
4471 which specifies the initial values of these parts. */
4473 static tree
4474 init_noncopied_parts (lhs, list)
4475 tree lhs;
4476 tree list;
4478 tree tail;
4479 tree parts = 0;
4481 for (tail = list; tail; tail = TREE_CHAIN (tail))
4482 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4483 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4484 else
4486 tree part = TREE_VALUE (tail);
4487 tree part_type = TREE_TYPE (part);
4488 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4489 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4491 return parts;
4494 /* Subroutine of expand_expr: return nonzero iff there is no way that
4495 EXP can reference X, which is being modified. */
4497 static int
4498 safe_from_p (x, exp)
4499 rtx x;
4500 tree exp;
4502 rtx exp_rtl = 0;
4503 int i, nops;
4505 if (x == 0
4506 /* If EXP has varying size, we MUST use a target since we currently
4507 have no way of allocating temporaries of variable size
4508 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4509 So we assume here that something at a higher level has prevented a
4510 clash. This is somewhat bogus, but the best we can do. Only
4511 do this when X is BLKmode. */
4512 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4513 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4514 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4515 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4516 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4517 != INTEGER_CST)
4518 && GET_MODE (x) == BLKmode))
4519 return 1;
4521 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4522 find the underlying pseudo. */
4523 if (GET_CODE (x) == SUBREG)
4525 x = SUBREG_REG (x);
4526 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4527 return 0;
4530 /* If X is a location in the outgoing argument area, it is always safe. */
4531 if (GET_CODE (x) == MEM
4532 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4533 || (GET_CODE (XEXP (x, 0)) == PLUS
4534 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4535 return 1;
4537 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4539 case 'd':
4540 exp_rtl = DECL_RTL (exp);
4541 break;
4543 case 'c':
4544 return 1;
4546 case 'x':
4547 if (TREE_CODE (exp) == TREE_LIST)
4548 return ((TREE_VALUE (exp) == 0
4549 || safe_from_p (x, TREE_VALUE (exp)))
4550 && (TREE_CHAIN (exp) == 0
4551 || safe_from_p (x, TREE_CHAIN (exp))));
4552 else
4553 return 0;
4555 case '1':
4556 return safe_from_p (x, TREE_OPERAND (exp, 0));
4558 case '2':
4559 case '<':
4560 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4561 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4563 case 'e':
4564 case 'r':
4565 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4566 the expression. If it is set, we conflict iff we are that rtx or
4567 both are in memory. Otherwise, we check all operands of the
4568 expression recursively. */
4570 switch (TREE_CODE (exp))
4572 case ADDR_EXPR:
4573 return (staticp (TREE_OPERAND (exp, 0))
4574 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4576 case INDIRECT_REF:
4577 if (GET_CODE (x) == MEM)
4578 return 0;
4579 break;
4581 case CALL_EXPR:
4582 exp_rtl = CALL_EXPR_RTL (exp);
4583 if (exp_rtl == 0)
4585 /* Assume that the call will clobber all hard registers and
4586 all of memory. */
4587 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4588 || GET_CODE (x) == MEM)
4589 return 0;
4592 break;
4594 case RTL_EXPR:
4595 /* If a sequence exists, we would have to scan every instruction
4596 in the sequence to see if it was safe. This is probably not
4597 worthwhile. */
4598 if (RTL_EXPR_SEQUENCE (exp))
4599 return 0;
4601 exp_rtl = RTL_EXPR_RTL (exp);
4602 break;
4604 case WITH_CLEANUP_EXPR:
4605 exp_rtl = RTL_EXPR_RTL (exp);
4606 break;
4608 case CLEANUP_POINT_EXPR:
4609 return safe_from_p (x, TREE_OPERAND (exp, 0));
4611 case SAVE_EXPR:
4612 exp_rtl = SAVE_EXPR_RTL (exp);
4613 break;
4615 case BIND_EXPR:
4616 /* The only operand we look at is operand 1. The rest aren't
4617 part of the expression. */
4618 return safe_from_p (x, TREE_OPERAND (exp, 1));
4620 case METHOD_CALL_EXPR:
4621 /* This takes a rtx argument, but shouldn't appear here. */
4622 abort ();
4625 /* If we have an rtx, we do not need to scan our operands. */
4626 if (exp_rtl)
4627 break;
4629 nops = tree_code_length[(int) TREE_CODE (exp)];
4630 for (i = 0; i < nops; i++)
4631 if (TREE_OPERAND (exp, i) != 0
4632 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4633 return 0;
4636 /* If we have an rtl, find any enclosed object. Then see if we conflict
4637 with it. */
4638 if (exp_rtl)
4640 if (GET_CODE (exp_rtl) == SUBREG)
4642 exp_rtl = SUBREG_REG (exp_rtl);
4643 if (GET_CODE (exp_rtl) == REG
4644 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4645 return 0;
4648 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4649 are memory and EXP is not readonly. */
4650 return ! (rtx_equal_p (x, exp_rtl)
4651 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4652 && ! TREE_READONLY (exp)));
4655 /* If we reach here, it is safe. */
4656 return 1;
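/* Example (illustrative): if X is a MEM and EXP is an INDIRECT_REF
   such as *p, safe_from_p returns 0, since modifying X might alias
   the location EXP references; for a constant EXP (class 'c') it
   returns 1 immediately.  */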
4659 /* Subroutine of expand_expr: return nonzero iff EXP is an
4660 expression whose type is statically determinable. */
4662 static int
4663 fixed_type_p (exp)
4664 tree exp;
4666 if (TREE_CODE (exp) == PARM_DECL
4667 || TREE_CODE (exp) == VAR_DECL
4668 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4669 || TREE_CODE (exp) == COMPONENT_REF
4670 || TREE_CODE (exp) == ARRAY_REF)
4671 return 1;
4672 return 0;
4675 /* Subroutine of expand_expr: return rtx if EXP is a
4676 variable or parameter; else return 0. */
4678 static rtx
4679 var_rtx (exp)
4680 tree exp;
4682 STRIP_NOPS (exp);
4683 switch (TREE_CODE (exp))
4685 case PARM_DECL:
4686 case VAR_DECL:
4687 return DECL_RTL (exp);
4688 default:
4689 return 0;
4693 /* expand_expr: generate code for computing expression EXP.
4694 An rtx for the computed value is returned. The value is never null.
4695 In the case of a void EXP, const0_rtx is returned.
4697 The value may be stored in TARGET if TARGET is nonzero.
4698 TARGET is just a suggestion; callers must assume that
4699 the rtx returned may not be the same as TARGET.
4701 If TARGET is CONST0_RTX, it means that the value will be ignored.
4703 If TMODE is not VOIDmode, it suggests generating the
4704 result in mode TMODE. But this is done only when convenient.
4705 Otherwise, TMODE is ignored and the value is generated in its natural mode.
4706 TMODE is just a suggestion; callers must assume that
4707 the rtx returned may not have mode TMODE.
4709 Note that TARGET may have neither TMODE nor MODE. In that case, it
4710 probably will not be used.
4712 If MODIFIER is EXPAND_SUM then when EXP is an addition
4713 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4714 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4715 products as above, or REG or MEM, or constant.
4716 Ordinarily in such cases we would output mul or add instructions
4717 and then return a pseudo reg containing the sum.
4719 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4720 it also marks a label as absolutely required (it can't be dead).
4721 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4722 This is used for outputting expressions used in initializers.
4724 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4725 with a constant address even if that address is not normally legitimate.
4726 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4728 rtx
4729 expand_expr (exp, target, tmode, modifier)
4730 register tree exp;
4731 rtx target;
4732 enum machine_mode tmode;
4733 enum expand_modifier modifier;
4735 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4736 This is static so it will be accessible to our recursive callees. */
4737 static tree placeholder_list = 0;
4738 register rtx op0, op1, temp;
4739 tree type = TREE_TYPE (exp);
4740 int unsignedp = TREE_UNSIGNED (type);
4741 register enum machine_mode mode = TYPE_MODE (type);
4742 register enum tree_code code = TREE_CODE (exp);
4743 optab this_optab;
4744 /* Use subtarget as the target for operand 0 of a binary operation. */
4745 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4746 rtx original_target = target;
4747 /* Maybe defer this until we are sure we are not doing bytecode? */
4748 int ignore = (target == const0_rtx
4749 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4750 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4751 || code == COND_EXPR)
4752 && TREE_CODE (type) == VOID_TYPE));
4753 tree context;
4756 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4758 bc_expand_expr (exp);
4759 return NULL;
4762 /* Don't use hard regs as subtargets, because the combiner
4763 can only handle pseudo regs. */
4764 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4765 subtarget = 0;
4766 /* Avoid subtargets inside loops,
4767 since they hide some invariant expressions. */
4768 if (preserve_subexpressions_p ())
4769 subtarget = 0;
4771 /* If we are going to ignore this result, we need only do something
4772 if there is a side-effect somewhere in the expression. If there
4773 is, short-circuit the most common cases here. Note that we must
4774 not call expand_expr with anything but const0_rtx in case this
4775 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4777 if (ignore)
4779 if (! TREE_SIDE_EFFECTS (exp))
4780 return const0_rtx;
4782 /* Ensure we reference a volatile object even if value is ignored. */
4783 if (TREE_THIS_VOLATILE (exp)
4784 && TREE_CODE (exp) != FUNCTION_DECL
4785 && mode != VOIDmode && mode != BLKmode)
4787 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4788 if (GET_CODE (temp) == MEM)
4789 temp = copy_to_reg (temp);
4790 return const0_rtx;
4793 if (TREE_CODE_CLASS (code) == '1')
4794 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4795 VOIDmode, modifier);
4796 else if (TREE_CODE_CLASS (code) == '2'
4797 || TREE_CODE_CLASS (code) == '<')
4799 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4800 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4801 return const0_rtx;
4803 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4804 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4805 /* If the second operand has no side effects, just evaluate
4806 the first. */
4807 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4808 VOIDmode, modifier);
4810 target = 0;
4813 /* If will do cse, generate all results into pseudo registers
4814 since 1) that allows cse to find more things
4815 and 2) otherwise cse could produce an insn the machine
4816 cannot support. */
4818 if (! cse_not_expected && mode != BLKmode && target
4819 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4820 target = subtarget;
4822 switch (code)
4824 case LABEL_DECL:
4826 tree function = decl_function_context (exp);
4827 /* Handle using a label in a containing function. */
4828 if (function != current_function_decl && function != 0)
4830 struct function *p = find_function_data (function);
4831 /* Allocate in the memory associated with the function
4832 that the label is in. */
4833 push_obstacks (p->function_obstack,
4834 p->function_maybepermanent_obstack);
4836 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4837 label_rtx (exp), p->forced_labels);
4838 pop_obstacks ();
4840 else if (modifier == EXPAND_INITIALIZER)
4841 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4842 label_rtx (exp), forced_labels);
4843 temp = gen_rtx (MEM, FUNCTION_MODE,
4844 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4845 if (function != current_function_decl && function != 0)
4846 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4847 return temp;
4850 case PARM_DECL:
4851 if (DECL_RTL (exp) == 0)
4853 error_with_decl (exp, "prior parameter's size depends on `%s'");
4854 return CONST0_RTX (mode);
4857 /* ... fall through ... */
4859 case VAR_DECL:
4860 /* If a static var's type was incomplete when the decl was written,
4861 but the type is complete now, lay out the decl now. */
4862 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4863 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4865 push_obstacks_nochange ();
4866 end_temporary_allocation ();
4867 layout_decl (exp, 0);
4868 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4869 pop_obstacks ();
4872 /* ... fall through ... */
4874 case FUNCTION_DECL:
4875 case RESULT_DECL:
4876 if (DECL_RTL (exp) == 0)
4877 abort ();
4879 /* Ensure the variable is marked as used even if it doesn't go
4880 through a parser. If it hasn't been used yet, write out an
4881 external definition. */
4882 if (! TREE_USED (exp))
4884 assemble_external (exp);
4885 TREE_USED (exp) = 1;
4888 /* Show we haven't gotten RTL for this yet. */
4889 temp = 0;
4891 /* Handle variables inherited from containing functions. */
4892 context = decl_function_context (exp);
4894 /* We treat inline_function_decl as an alias for the current function
4895 because that is the inline function whose vars, types, etc.
4896 are being merged into the current function.
4897 See expand_inline_function. */
4899 if (context != 0 && context != current_function_decl
4900 && context != inline_function_decl
4901 /* If var is static, we don't need a static chain to access it. */
4902 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4903 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4905 rtx addr;
4907 /* Mark as non-local and addressable. */
4908 DECL_NONLOCAL (exp) = 1;
4909 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4910 abort ();
4911 mark_addressable (exp);
4912 if (GET_CODE (DECL_RTL (exp)) != MEM)
4913 abort ();
4914 addr = XEXP (DECL_RTL (exp), 0);
4915 if (GET_CODE (addr) == MEM)
4916 addr = gen_rtx (MEM, Pmode,
4917 fix_lexical_addr (XEXP (addr, 0), exp));
4918 else
4919 addr = fix_lexical_addr (addr, exp);
4920 temp = change_address (DECL_RTL (exp), mode, addr);
4923 /* This is the case of an array whose size is to be determined
4924 from its initializer, while the initializer is still being parsed.
4925 See expand_decl. */
4927 else if (GET_CODE (DECL_RTL (exp)) == MEM
4928 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4929 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4930 XEXP (DECL_RTL (exp), 0));
4932 /* If DECL_RTL is memory, we are in the normal case and either
4933 the address is not valid or it is not a register and -fforce-addr
4934 is specified, get the address into a register. */
4936 else if (GET_CODE (DECL_RTL (exp)) == MEM
4937 && modifier != EXPAND_CONST_ADDRESS
4938 && modifier != EXPAND_SUM
4939 && modifier != EXPAND_INITIALIZER
4940 && (! memory_address_p (DECL_MODE (exp),
4941 XEXP (DECL_RTL (exp), 0))
4942 || (flag_force_addr
4943 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4944 temp = change_address (DECL_RTL (exp), VOIDmode,
4945 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4947 /* If we got something, return it. But first, set the alignment
4948 if the address is a register. */
4949 if (temp != 0)
4951 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4952 mark_reg_pointer (XEXP (temp, 0),
4953 DECL_ALIGN (exp) / BITS_PER_UNIT);
4955 return temp;
4958 /* If the mode of DECL_RTL does not match that of the decl, it
4959 must be a promoted value. We return a SUBREG of the wanted mode,
4960 but mark it so that we know that it was already extended. */
4962 if (GET_CODE (DECL_RTL (exp)) == REG
4963 && GET_MODE (DECL_RTL (exp)) != mode)
4965 /* Get the signedness used for this variable. Ensure we get the
4966 same mode we got when the variable was declared. */
4967 if (GET_MODE (DECL_RTL (exp))
4968 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4969 abort ();
4971 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4972 SUBREG_PROMOTED_VAR_P (temp) = 1;
4973 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4974 return temp;
4977 return DECL_RTL (exp);
4979 case INTEGER_CST:
4980 return immed_double_const (TREE_INT_CST_LOW (exp),
4981 TREE_INT_CST_HIGH (exp),
4982 mode);
4984 case CONST_DECL:
4985 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4987 case REAL_CST:
4988 /* If optimized, generate immediate CONST_DOUBLE
4989 which will be turned into memory by reload if necessary.
4991 We used to force a register so that loop.c could see it. But
4992 this does not allow gen_* patterns to perform optimizations with
4993 the constants. It also produces two insns in cases like "x = 1.0;".
4994 On most machines, floating-point constants are not permitted in
4995 many insns, so we'd end up copying it to a register in any case.
4997 Now, we do the copying in expand_binop, if appropriate. */
4998 return immed_real_const (exp);
5000 case COMPLEX_CST:
5001 case STRING_CST:
5002 if (! TREE_CST_RTL (exp))
5003 output_constant_def (exp);
5005 /* TREE_CST_RTL probably contains a constant address.
5006 On RISC machines where a constant address isn't valid,
5007 make some insns to get that address into a register. */
5008 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5009 && modifier != EXPAND_CONST_ADDRESS
5010 && modifier != EXPAND_INITIALIZER
5011 && modifier != EXPAND_SUM
5012 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5013 || (flag_force_addr
5014 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5015 return change_address (TREE_CST_RTL (exp), VOIDmode,
5016 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5017 return TREE_CST_RTL (exp);
5019 case SAVE_EXPR:
5020 context = decl_function_context (exp);
5022 /* We treat inline_function_decl as an alias for the current function
5023 because that is the inline function whose vars, types, etc.
5024 are being merged into the current function.
5025 See expand_inline_function. */
5026 if (context == current_function_decl || context == inline_function_decl)
5027 context = 0;
5029 /* If this is non-local, handle it. */
5030 if (context)
5032 temp = SAVE_EXPR_RTL (exp);
5033 if (temp && GET_CODE (temp) == REG)
5035 put_var_into_stack (exp);
5036 temp = SAVE_EXPR_RTL (exp);
5038 if (temp == 0 || GET_CODE (temp) != MEM)
5039 abort ();
5040 return change_address (temp, mode,
5041 fix_lexical_addr (XEXP (temp, 0), exp));
5043 if (SAVE_EXPR_RTL (exp) == 0)
5045 if (mode == VOIDmode)
5046 temp = const0_rtx;
5047 else
5048 temp = assign_temp (type, 0, 0, 0);
5050 SAVE_EXPR_RTL (exp) = temp;
5051 if (!optimize && GET_CODE (temp) == REG)
5052 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
5053 save_expr_regs);
5055 /* If the mode of TEMP does not match that of the expression, it
5056 must be a promoted value. We pass store_expr a SUBREG of the
5057 wanted mode but mark it so that we know that it was already
5058 extended. Note that `unsignedp' was modified above in
5059 this case. */
5061 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5063 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5064 SUBREG_PROMOTED_VAR_P (temp) = 1;
5065 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5068 if (temp == const0_rtx)
5069 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5070 else
5071 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5074 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5075 must be a promoted value. We return a SUBREG of the wanted mode,
5076 but mark it so that we know that it was already extended. */
5078 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5079 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5081 /* Compute the signedness and make the proper SUBREG. */
5082 promote_mode (type, mode, &unsignedp, 0);
5083 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5084 SUBREG_PROMOTED_VAR_P (temp) = 1;
5085 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5086 return temp;
5089 return SAVE_EXPR_RTL (exp);
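/* Illustrative note: a SAVE_EXPR guarantees single evaluation of a
   shared subexpression, e.g. the bound of a C variable-length array

       void f (int n) { int a[n + 1]; }

   where n + 1 is computed once; the first expansion caches the
   result in SAVE_EXPR_RTL and every later use returns it here.  */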
5091 case UNSAVE_EXPR:
5093 rtx temp;
5094 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5095 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5096 return temp;
5099 case PLACEHOLDER_EXPR:
5100 /* If there is an object at the head of the placeholder list,
5101 see if some object in its references is of type TYPE. For
5102 further information, see tree.def. */
5103 if (placeholder_list)
5105 tree need_type = TYPE_MAIN_VARIANT (type);
5106 tree object = 0;
5107 tree old_list = placeholder_list;
5108 tree elt;
5110 /* See if the object is the type that we want. Then see if
5111 the operand of any reference is the type we want. */
5112 if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_PURPOSE (placeholder_list)))
5113 == need_type))
5114 object = TREE_PURPOSE (placeholder_list);
5116 /* Find the innermost reference that is of the type we want. */
5117 for (elt = TREE_PURPOSE (placeholder_list);
5118 elt != 0
5119 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5120 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5121 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5122 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
5123 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5124 || TREE_CODE (elt) == COND_EXPR)
5125 ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
5126 if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5127 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
5128 == need_type))
5129 object = TREE_OPERAND (elt, 0);
5131 if (object != 0)
5133 /* Expand this object skipping the list entries before
5134 it was found in case it is also a PLACEHOLDER_EXPR.
5135 In that case, we want to translate it using subsequent
5136 entries. */
5137 placeholder_list = TREE_CHAIN (placeholder_list);
5138 temp = expand_expr (object, original_target, tmode, modifier);
5139 placeholder_list = old_list;
5140 return temp;
5144 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5145 abort ();
5147 case WITH_RECORD_EXPR:
5148 /* Put the object on the placeholder list, expand our first operand,
5149 and pop the list. */
5150 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5151 placeholder_list);
5152 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5153 tmode, modifier);
5154 placeholder_list = TREE_CHAIN (placeholder_list);
5155 return target;
5157 case EXIT_EXPR:
5158 expand_exit_loop_if_false (NULL_PTR,
5159 invert_truthvalue (TREE_OPERAND (exp, 0)));
5160 return const0_rtx;
5162 case LOOP_EXPR:
5163 push_temp_slots ();
5164 expand_start_loop (1);
5165 expand_expr_stmt (TREE_OPERAND (exp, 0));
5166 expand_end_loop ();
5167 pop_temp_slots ();
5169 return const0_rtx;
5171 case BIND_EXPR:
5173 tree vars = TREE_OPERAND (exp, 0);
5174 int vars_need_expansion = 0;
5176 /* Need to open a binding contour here because
5177 if there are any cleanups they must be contained here. */
5178 expand_start_bindings (0);
5180 /* Mark the corresponding BLOCK for output in its proper place. */
5181 if (TREE_OPERAND (exp, 2) != 0
5182 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5183 insert_block (TREE_OPERAND (exp, 2));
5185 /* If VARS have not yet been expanded, expand them now. */
5186 while (vars)
5188 if (DECL_RTL (vars) == 0)
5190 vars_need_expansion = 1;
5191 expand_decl (vars);
5193 expand_decl_init (vars);
5194 vars = TREE_CHAIN (vars);
5197 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5199 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5201 return temp;
5204 case RTL_EXPR:
5205 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5206 abort ();
5207 emit_insns (RTL_EXPR_SEQUENCE (exp));
5208 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5209 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5210 free_temps_for_rtl_expr (exp);
5211 return RTL_EXPR_RTL (exp);
5213 case CONSTRUCTOR:
5214 /* If we don't need the result, just ensure we evaluate any
5215 subexpressions. */
5216 if (ignore)
5218 tree elt;
5219 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5220 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5221 return const0_rtx;
5224 /* All elts simple constants => refer to a constant in memory. But
5225 if this is a non-BLKmode mode, let it store a field at a time
5226 since that should make a CONST_INT or CONST_DOUBLE when we
5227 fold. Likewise, if we have a target we can use, it is best to
5228 store directly into the target unless the type is large enough
5229 that memcpy will be used. If we are making an initializer and
5230 all operands are constant, put it in memory as well. */
5231 else if ((TREE_STATIC (exp)
5232 && ((mode == BLKmode
5233 && ! (target != 0 && safe_from_p (target, exp)))
5234 || TREE_ADDRESSABLE (exp)
5235 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5236 && (move_by_pieces_ninsns
5237 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5238 TYPE_ALIGN (type) / BITS_PER_UNIT)
5239 > MOVE_RATIO)
5240 && ! mostly_zeros_p (exp))))
5241 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5243 rtx constructor = output_constant_def (exp);
5244 if (modifier != EXPAND_CONST_ADDRESS
5245 && modifier != EXPAND_INITIALIZER
5246 && modifier != EXPAND_SUM
5247 && (! memory_address_p (GET_MODE (constructor),
5248 XEXP (constructor, 0))
5249 || (flag_force_addr
5250 && GET_CODE (XEXP (constructor, 0)) != REG)))
5251 constructor = change_address (constructor, VOIDmode,
5252 XEXP (constructor, 0));
5253 return constructor;
5256 else
5258 /* Handle calls that pass values in multiple non-contiguous
5259 locations. The Irix 6 ABI has examples of this. */
5260 if (target == 0 || ! safe_from_p (target, exp)
5261 || GET_CODE (target) == PARALLEL)
5263 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5264 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5265 else
5266 target = assign_temp (type, 0, 1, 1);
5269 if (TREE_READONLY (exp))
5271 if (GET_CODE (target) == MEM)
5272 target = change_address (target, GET_MODE (target),
5273 XEXP (target, 0));
5274 RTX_UNCHANGING_P (target) = 1;
5277 store_constructor (exp, target, 0);
5278 return target;
5281 case INDIRECT_REF:
5283 tree exp1 = TREE_OPERAND (exp, 0);
5284 tree exp2;
5286 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5287 op0 = memory_address (mode, op0);
5289 temp = gen_rtx (MEM, mode, op0);
5290 /* If address was computed by addition,
5291 mark this as an element of an aggregate. */
5292 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5293 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5294 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5295 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5296 || (TREE_CODE (exp1) == ADDR_EXPR
5297 && (exp2 = TREE_OPERAND (exp1, 0))
5298 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5299 MEM_IN_STRUCT_P (temp) = 1;
5300 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5302 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5303 here, because, in C and C++, the fact that a location is accessed
5304 through a pointer to const does not mean that the value there can
5305 never change. Languages where it can never change should
5306 also set TREE_STATIC. */
5307 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5308 return temp;
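/* Example (a sketch): for a C dereference like

       struct s *p;  ... (*p).i ...

   the inner *p reaches this case; because TREE_TYPE (exp) is an
   aggregate, MEM_IN_STRUCT_P is set on the resulting MEM so later
   passes know it addresses part of a structure.  */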
5311 case ARRAY_REF:
5312 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5313 abort ();
5316 tree array = TREE_OPERAND (exp, 0);
5317 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5318 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5319 tree index = TREE_OPERAND (exp, 1);
5320 tree index_type = TREE_TYPE (index);
5321 int i;
5323 if (TREE_CODE (low_bound) != INTEGER_CST
5324 && contains_placeholder_p (low_bound))
5325 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5327 /* Optimize the special-case of a zero lower bound.
5329 We convert the low_bound to sizetype to avoid some problems
5330 with constant folding. (E.g. suppose the lower bound is 1,
5331 and its mode is QI. Without the conversion, (ARRAY
5332 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5333 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5335 But sizetype isn't quite right either (especially if
5336 the lowbound is negative). FIXME */
5338 if (! integer_zerop (low_bound))
5339 index = fold (build (MINUS_EXPR, index_type, index,
5340 convert (sizetype, low_bound)));
5342 if ((TREE_CODE (index) != INTEGER_CST
5343 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5344 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5346 /* Nonconstant array index or nonconstant element size, and
5347 not an array in an unaligned (packed) structure field.
5348 Generate the tree for *(&array+index) and expand that,
5349 except do it in a language-independent way
5350 and don't complain about non-lvalue arrays.
5351 `mark_addressable' should already have been called
5352 for any array for which this case will be reached. */
5354 /* Don't forget the const or volatile flag from the array
5355 element. */
5356 tree variant_type = build_type_variant (type,
5357 TREE_READONLY (exp),
5358 TREE_THIS_VOLATILE (exp));
5359 tree array_adr = build1 (ADDR_EXPR,
5360 build_pointer_type (variant_type), array);
5361 tree elt;
5362 tree size = size_in_bytes (type);
5364 /* Convert the integer argument to a type the same size as sizetype
5365 so the multiply won't overflow spuriously. */
5366 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5367 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5368 index);
5370 if (TREE_CODE (size) != INTEGER_CST
5371 && contains_placeholder_p (size))
5372 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5374 /* Don't think the address has side effects
5375 just because the array does.
5376 (In some cases the address might have side effects,
5377 and we fail to record that fact here. However, it should not
5378 matter, since expand_expr should not care.) */
5379 TREE_SIDE_EFFECTS (array_adr) = 0;
5381 elt
5382 = build1
5383 (INDIRECT_REF, type,
5384 fold (build (PLUS_EXPR,
5385 TYPE_POINTER_TO (variant_type),
5386 array_adr,
5387 fold
5388 (build1
5389 (NOP_EXPR,
5390 TYPE_POINTER_TO (variant_type),
5391 fold (build (MULT_EXPR, TREE_TYPE (index),
5392 index,
5393 convert (TREE_TYPE (index),
5394 size))))))));
5396 /* Volatility, etc., of new expression is same as old
5397 expression. */
5398 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5399 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5400 TREE_READONLY (elt) = TREE_READONLY (exp);
5402 return expand_expr (elt, target, tmode, modifier);
5405 /* Fold an expression like: "foo"[2].
5406 This is not done in fold so it won't happen inside &.
5407 Don't fold if this is for wide characters since it's too
5408 difficult to do correctly and this is a very rare case. */
5410 if (TREE_CODE (array) == STRING_CST
5411 && TREE_CODE (index) == INTEGER_CST
5412 && !TREE_INT_CST_HIGH (index)
5413 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5414 && GET_MODE_CLASS (mode) == MODE_INT
5415 && GET_MODE_SIZE (mode) == 1)
5416 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5418 /* If this is a constant index into a constant array,
5419 just get the value from the array. Handle both the cases when
5420 we have an explicit constructor and when our operand is a variable
5421 that was declared const. */
5423 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5425 if (TREE_CODE (index) == INTEGER_CST
5426 && TREE_INT_CST_HIGH (index) == 0)
5428 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5430 i = TREE_INT_CST_LOW (index);
5431 while (elem && i--)
5432 elem = TREE_CHAIN (elem);
5433 if (elem)
5434 return expand_expr (fold (TREE_VALUE (elem)), target,
5435 tmode, modifier);
5439 else if (optimize >= 1
5440 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5441 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5442 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5444 if (TREE_CODE (index) == INTEGER_CST
5445 && TREE_INT_CST_HIGH (index) == 0)
5447 tree init = DECL_INITIAL (array);
5449 i = TREE_INT_CST_LOW (index);
5450 if (TREE_CODE (init) == CONSTRUCTOR)
5452 tree elem = CONSTRUCTOR_ELTS (init);
5454 while (elem
5455 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5456 elem = TREE_CHAIN (elem);
5457 if (elem)
5458 return expand_expr (fold (TREE_VALUE (elem)), target,
5459 tmode, modifier);
5461 else if (TREE_CODE (init) == STRING_CST
5462 && i < TREE_STRING_LENGTH (init))
5463 return GEN_INT (TREE_STRING_POINTER (init)[i]);
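/* Example (illustrative): at optimization levels of 1 and above,
   given

       static const char s[] = "abc";

   the reference s[1] is folded here to the constant 'b' by reading
   DECL_INITIAL's STRING_CST directly, with no memory reference
   emitted.  */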
5468 /* Treat array-ref with constant index as a component-ref. */
5470 case COMPONENT_REF:
5471 case BIT_FIELD_REF:
5472 /* If the operand is a CONSTRUCTOR, we can just extract the
5473 appropriate field if it is present. Don't do this if we have
5474 already written the data since we want to refer to that copy
5475 and varasm.c assumes that's what we'll do. */
5476 if (code != ARRAY_REF
5477 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5478 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5480 tree elt;
5482 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5483 elt = TREE_CHAIN (elt))
5484 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5485 /* We can normally use the value of the field in the
5486 CONSTRUCTOR. However, if this is a bitfield in
5487 an integral mode that we can fit in a HOST_WIDE_INT,
5488 we must mask only the number of bits in the bitfield,
5489 since this is done implicitly by the constructor. If
5490 the bitfield does not meet either of those conditions,
5491 we can't do this optimization. */
5492 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5493 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5494 == MODE_INT)
5495 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5496 <= HOST_BITS_PER_WIDE_INT))))
5498 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5499 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5501 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5502 enum machine_mode imode
5503 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5505 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5507 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5508 op0 = expand_and (op0, op1, target);
5510 else
5512 tree count
5513 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
5515 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5516 target, 0);
5517 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5518 target, 0);
5522 return op0;
5527 enum machine_mode mode1;
5528 int bitsize;
5529 int bitpos;
5530 tree offset;
5531 int volatilep = 0;
5532 int alignment;
5533 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5534 &mode1, &unsignedp, &volatilep,
5535 &alignment);
5537 /* If we got back the original object, something is wrong. Perhaps
5538 we are evaluating an expression too early. In any event, don't
5539 infinitely recurse. */
5540 if (tem == exp)
5541 abort ();
5543 /* If TEM's type is a union of variable size, pass TARGET to the inner
5544 computation, since it will need a temporary and TARGET is known
5545 to be safe to use. This occurs in unchecked conversion in Ada. */
5547 op0 = expand_expr (tem,
5548 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5549 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5550 != INTEGER_CST)
5551 ? target : NULL_RTX),
5552 VOIDmode,
5553 modifier == EXPAND_INITIALIZER ? modifier : 0);
5555 /* If this is a constant, put it into a register if it is a
5556 legitimate constant and memory if it isn't. */
5557 if (CONSTANT_P (op0))
5559 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5560 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5561 op0 = force_reg (mode, op0);
5562 else
5563 op0 = validize_mem (force_const_mem (mode, op0));
5566 if (offset != 0)
5568 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5570 if (GET_CODE (op0) != MEM)
5571 abort ();
5572 op0 = change_address (op0, VOIDmode,
5573 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5574 force_reg (ptr_mode, offset_rtx)));
5577 /* Don't forget about volatility even if this is a bitfield. */
5578 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5580 op0 = copy_rtx (op0);
5581 MEM_VOLATILE_P (op0) = 1;
5584 /* In cases where an aligned union has an unaligned object
5585 as a field, we might be extracting a BLKmode value from
5586 an integer-mode (e.g., SImode) object. Handle this case
5587 by doing the extract into an object as wide as the field
5588 (which we know to be the width of a basic mode), then
5589 storing into memory, and changing the mode to BLKmode.
5590 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5591 EXPAND_INITIALIZER), then we must not copy to a temporary. */
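/* Annotation (illustrative sketch, not in the original source): for
   `union u { int i; struct { char a, b, c; } s; }' the union gets
   SImode while the member S is a 3-byte BLKmode value; fetching S
   from a register copy of the union takes the path below, which
   extracts into an SImode temporary, spills it to a stack slot, and
   rereads the slot in BLKmode.  */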
5592 if (mode1 == VOIDmode
5593 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5594 || (modifier != EXPAND_CONST_ADDRESS
5595 && modifier != EXPAND_INITIALIZER
5596 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5597 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5598 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5599 /* If the field isn't aligned enough to fetch as a memref,
5600 fetch it as a bit field. */
5601 || (SLOW_UNALIGNED_ACCESS
5602 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5603 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5605 enum machine_mode ext_mode = mode;
5607 if (ext_mode == BLKmode)
5608 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5610 if (ext_mode == BLKmode)
5612 /* In this case, BITPOS must start at a byte boundary and
5613 TARGET, if specified, must be a MEM. */
5614 if (GET_CODE (op0) != MEM
5615 || (target != 0 && GET_CODE (target) != MEM)
5616 || bitpos % BITS_PER_UNIT != 0)
5617 abort ();
5619 op0 = change_address (op0, VOIDmode,
5620 plus_constant (XEXP (op0, 0),
5621 bitpos / BITS_PER_UNIT));
5622 if (target == 0)
5623 target = assign_temp (type, 0, 1, 1);
5625 emit_block_move (target, op0,
5626 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5627 / BITS_PER_UNIT),
5628 1);
5630 return target;
5633 op0 = validize_mem (op0);
5635 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5636 mark_reg_pointer (XEXP (op0, 0), alignment);
5638 op0 = extract_bit_field (op0, bitsize, bitpos,
5639 unsignedp, target, ext_mode, ext_mode,
5640 alignment,
5641 int_size_in_bytes (TREE_TYPE (tem)));
5643 /* If the result is a record type and BITSIZE is narrower than
5644 the mode of OP0, an integral mode, and this is a big endian
5645 machine, we must put the field into the high-order bits. */
5646 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5647 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5648 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5649 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5650 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5651 - bitsize),
5652 op0, 1);
5654 if (mode == BLKmode)
5656 rtx new = assign_stack_temp (ext_mode,
5657 bitsize / BITS_PER_UNIT, 0);
5659 emit_move_insn (new, op0);
5660 op0 = copy_rtx (new);
5661 PUT_MODE (op0, BLKmode);
5662 MEM_IN_STRUCT_P (op0) = 1;
5665 return op0;
5668 /* If the result is BLKmode, use that to access the object
5669 now as well. */
5670 if (mode == BLKmode)
5671 mode1 = BLKmode;
5673 /* Get a reference to just this component. */
5674 if (modifier == EXPAND_CONST_ADDRESS
5675 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5676 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5677 (bitpos / BITS_PER_UNIT)));
5678 else
5679 op0 = change_address (op0, mode1,
5680 plus_constant (XEXP (op0, 0),
5681 (bitpos / BITS_PER_UNIT)));
5682 if (GET_CODE (XEXP (op0, 0)) == REG)
5683 mark_reg_pointer (XEXP (op0, 0), alignment);
5685 MEM_IN_STRUCT_P (op0) = 1;
5686 MEM_VOLATILE_P (op0) |= volatilep;
5687 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
5688 || modifier == EXPAND_CONST_ADDRESS
5689 || modifier == EXPAND_INITIALIZER)
5690 return op0;
5691 else if (target == 0)
5692 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5694 convert_move (target, op0, unsignedp);
5695 return target;
5698 /* Intended for a reference to a buffer of a file-object in Pascal.
5699 But it's not certain that a special tree code will really be
5700 necessary for these. INDIRECT_REF might work for them. */
5701 case BUFFER_REF:
5702 abort ();
5704 case IN_EXPR:
5706 /* Pascal set IN expression.
5708 Algorithm:
5709 rlo = set_low - (set_low%bits_per_word);
5710 the_word = set [ (index - rlo)/bits_per_word ];
5711 bit_index = index % bits_per_word;
5712 bitmask = 1 << bit_index;
5713 return !!(the_word & bitmask); */
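/* Annotation (worked example, not in the original source): with
   BITS_PER_UNIT == 8, set_low == 0 and index == 19:

       rlo = 0,  the_word = set[(19 - 0) / 8] = set[2],
       bit_index = 19 % 8 = 3,  bitmask = 1 << 3,

   so the result is bit 3 of the third byte.  Note that the code
   below uses BITS_PER_UNIT for bits_per_word, i.e. it works per
   byte rather than per word.  */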
5715 tree set = TREE_OPERAND (exp, 0);
5716 tree index = TREE_OPERAND (exp, 1);
5717 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5718 tree set_type = TREE_TYPE (set);
5719 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5720 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5721 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5722 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5723 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5724 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5725 rtx setaddr = XEXP (setval, 0);
5726 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5727 rtx rlow;
5728 rtx diff, quo, rem, addr, bit, result;
5730 preexpand_calls (exp);
5732 /* If domain is empty, answer is no. Likewise if index is constant
5733 and out of bounds. */
5734 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5735 && TREE_CODE (set_low_bound) == INTEGER_CST
5736 && tree_int_cst_lt (set_high_bound, set_low_bound)
5737 || (TREE_CODE (index) == INTEGER_CST
5738 && TREE_CODE (set_low_bound) == INTEGER_CST
5739 && tree_int_cst_lt (index, set_low_bound))
5740 || (TREE_CODE (set_high_bound) == INTEGER_CST
5741 && TREE_CODE (index) == INTEGER_CST
5742 && tree_int_cst_lt (set_high_bound, index))))
5743 return const0_rtx;
5745 if (target == 0)
5746 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5748 /* If we get here, we have to generate the code for both cases
5749 (in range and out of range). */
5751 op0 = gen_label_rtx ();
5752 op1 = gen_label_rtx ();
5754 if (! (GET_CODE (index_val) == CONST_INT
5755 && GET_CODE (lo_r) == CONST_INT))
5757 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5758 GET_MODE (index_val), iunsignedp, 0);
5759 emit_jump_insn (gen_blt (op1));
5762 if (! (GET_CODE (index_val) == CONST_INT
5763 && GET_CODE (hi_r) == CONST_INT))
5765 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5766 GET_MODE (index_val), iunsignedp, 0);
5767 emit_jump_insn (gen_bgt (op1));
5770 /* Calculate the element number of bit zero in the first word
5771 of the set. */
5772 if (GET_CODE (lo_r) == CONST_INT)
5773 rlow = GEN_INT (INTVAL (lo_r)
5774 & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
5775 else
5776 rlow = expand_binop (index_mode, and_optab, lo_r,
5777 GEN_INT (~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
5778 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5780 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5781 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5783 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5784 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5785 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5786 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5788 addr = memory_address (byte_mode,
5789 expand_binop (index_mode, add_optab, quo,
5790 setaddr, NULL_RTX, iunsignedp,
5791 OPTAB_LIB_WIDEN));
5793 /* Extract the bit we want to examine. */
5794 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5795 gen_rtx (MEM, byte_mode, addr),
5796 make_tree (TREE_TYPE (index), rem),
5797 NULL_RTX, 1);
5798 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5799 GET_MODE (target) == byte_mode ? target : 0,
5800 1, OPTAB_LIB_WIDEN);
5802 if (result != target)
5803 convert_move (target, result, 1);
5805 /* Output the code to handle the out-of-range case. */
5806 emit_jump (op0);
5807 emit_label (op1);
5808 emit_move_insn (target, const0_rtx);
5809 emit_label (op0);
5810 return target;
5813 case WITH_CLEANUP_EXPR:
5814 if (RTL_EXPR_RTL (exp) == 0)
5816 RTL_EXPR_RTL (exp)
5817 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5818 cleanups_this_call
5819 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5820 /* That's it for this cleanup. */
5821 TREE_OPERAND (exp, 2) = 0;
5822 expand_eh_region_start ();
5824 return RTL_EXPR_RTL (exp);
5826 case CLEANUP_POINT_EXPR:
5828 extern int temp_slot_level;
5829 tree old_cleanups = cleanups_this_call;
5830 int old_temp_level = target_temp_slot_level;
5831 push_temp_slots ();
5832 target_temp_slot_level = temp_slot_level;
5833 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5834 /* If we're going to use this value, load it up now. */
5835 if (! ignore)
5836 op0 = force_not_mem (op0);
5837 expand_cleanups_to (old_cleanups);
5838 preserve_temp_slots (op0);
5839 free_temp_slots ();
5840 pop_temp_slots ();
5841 target_temp_slot_level = old_temp_level;
5843 return op0;
5845 case CALL_EXPR:
5846 /* Check for a built-in function. */
5847 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5848 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5849 == FUNCTION_DECL)
5850 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5851 return expand_builtin (exp, target, subtarget, tmode, ignore);
5853 /* If this call was expanded already by preexpand_calls,
5854 just return the result we got. */
5855 if (CALL_EXPR_RTL (exp) != 0)
5856 return CALL_EXPR_RTL (exp);
5858 return expand_call (exp, target, ignore);
5860 case NON_LVALUE_EXPR:
5861 case NOP_EXPR:
5862 case CONVERT_EXPR:
5863 case REFERENCE_EXPR:
5864 if (TREE_CODE (type) == UNION_TYPE)
5866 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5867 if (target == 0)
5869 if (mode != BLKmode)
5870 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5871 else
5872 target = assign_temp (type, 0, 1, 1);
5875 if (GET_CODE (target) == MEM)
5876 /* Store data into beginning of memory target. */
5877 store_expr (TREE_OPERAND (exp, 0),
5878 change_address (target, TYPE_MODE (valtype), 0), 0);
5880 else if (GET_CODE (target) == REG)
5881 /* Store this field into a union of the proper type. */
5882 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5883 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5884 VOIDmode, 0, 1,
5885 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5886 else
5887 abort ();
5889 /* Return the entire union. */
5890 return target;
5893 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5895 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5896 modifier);
5898 /* If the signedness of the conversion differs and OP0 is
5899 a promoted SUBREG, clear that indication since we now
5900 have to do the proper extension. */
5901 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5902 && GET_CODE (op0) == SUBREG)
5903 SUBREG_PROMOTED_VAR_P (op0) = 0;
5905 return op0;
5908 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5909 if (GET_MODE (op0) == mode)
5910 return op0;
5912 /* If OP0 is a constant, just convert it into the proper mode. */
5913 if (CONSTANT_P (op0))
5914 return
5915 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5916 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5918 if (modifier == EXPAND_INITIALIZER)
5919 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5921 if (target == 0)
5922 return
5923 convert_to_mode (mode, op0,
5924 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5925 else
5926 convert_move (target, op0,
5927 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5928 return target;
5930 case PLUS_EXPR:
5931 /* We come here from MINUS_EXPR when the second operand is a
5932 constant. */
5933 plus_expr:
5934 this_optab = add_optab;
5936 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5937 something else, make sure we add the register to the constant and
5938 then to the other thing. This case can occur during strength
5939 reduction and doing it this way will produce better code if the
5940 frame pointer or argument pointer is eliminated.
5942 fold-const.c will ensure that the constant is always in the inner
5943 PLUS_EXPR, so the only case we need to do anything about is if
5944 sp, ap, or fp is our second argument, in which case we must swap
5945 the innermost first argument and our second argument. */
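/* Annotation (illustrative sketch, not in the original source): given
   the tree (PLUS (PLUS X 4) FP), where FP is an RTL_EXPR holding the
   frame pointer, the swap below produces (PLUS (PLUS FP 4) X), so
   that after frame-pointer elimination the register and the constant
   sit in the same inner sum and can fold into one address.  */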
5947 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5948 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5949 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5950 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5951 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5952 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5954 tree t = TREE_OPERAND (exp, 1);
5956 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5957 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5960 /* If the result is to be ptr_mode and we are adding an integer to
5961 something, we might be forming a constant. So try to use
5962 plus_constant. If it produces a sum and we can't accept it,
5963 use force_operand. This allows P = &ARR[const] to generate
5964 efficient code on machines where a SYMBOL_REF is not a valid
5965 address.
5967 If this is an EXPAND_SUM call, always return the sum. */
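/* Annotation (illustrative sketch, not in the original source): for
   `&arr[10]' with 4-byte elements, the constant is folded by
   plus_constant so the whole address becomes the single rtx

       (const (plus (symbol_ref "arr") (const_int 40)))

   with no add instruction emitted; force_operand is applied only
   when the caller cannot accept such a sum.  */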
5968 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5969 || mode == ptr_mode)
5971 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5972 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5973 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5975 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5976 EXPAND_SUM);
5977 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5978 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5979 op1 = force_operand (op1, target);
5980 return op1;
5983 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5984 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5985 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5987 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5988 EXPAND_SUM);
5989 if (! CONSTANT_P (op0))
5991 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5992 VOIDmode, modifier);
5993 /* Don't go to both_summands if modifier
5994 says it's not right to return a PLUS. */
5995 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5996 goto binop2;
5997 goto both_summands;
5999 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6000 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6001 op0 = force_operand (op0, target);
6002 return op0;
6006 /* No sense saving up arithmetic to be done
6007 if it's all in the wrong mode to form part of an address.
6008 And force_operand won't know whether to sign-extend or
6009 zero-extend. */
6010 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6011 || mode != ptr_mode)
6012 goto binop;
6014 preexpand_calls (exp);
6015 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6016 subtarget = 0;
6018 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
6019 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
6021 both_summands:
6022 /* Make sure any term that's a sum with a constant comes last. */
6023 if (GET_CODE (op0) == PLUS
6024 && CONSTANT_P (XEXP (op0, 1)))
6026 temp = op0;
6027 op0 = op1;
6028 op1 = temp;
6030 /* If adding to a sum including a constant,
6031 associate it to put the constant outside. */
6032 if (GET_CODE (op1) == PLUS
6033 && CONSTANT_P (XEXP (op1, 1)))
6035 rtx constant_term = const0_rtx;
6037 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6038 if (temp != 0)
6039 op0 = temp;
6040 /* Ensure that MULT comes first if there is one. */
6041 else if (GET_CODE (op0) == MULT)
6042 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
6043 else
6044 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
6046 /* Let's also eliminate constants from op0 if possible. */
6047 op0 = eliminate_constant_term (op0, &constant_term);
6049 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6050 their sum should be a constant. Form it into OP1, since the
6051 result we want will then be OP0 + OP1. */
6053 temp = simplify_binary_operation (PLUS, mode, constant_term,
6054 XEXP (op1, 1));
6055 if (temp != 0)
6056 op1 = temp;
6057 else
6058 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
6061 /* Put a constant term last and put a multiplication first. */
6062 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6063 temp = op1, op1 = op0, op0 = temp;
6065 temp = simplify_binary_operation (PLUS, mode, op0, op1);
6066 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
6068 case MINUS_EXPR:
6069 /* For initializers, we are allowed to return a MINUS of two
6070 symbolic constants. Here we handle all cases when both operands
6071 are constant. */
6074 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6075 && really_constant_p (TREE_OPERAND (exp, 0))
6076 && really_constant_p (TREE_OPERAND (exp, 1)))
6078 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
6079 VOIDmode, modifier);
6080 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6081 VOIDmode, modifier);
6083 /* If the last operand is a CONST_INT, use plus_constant of
6084 the negated constant. Else make the MINUS. */
6085 if (GET_CODE (op1) == CONST_INT)
6086 return plus_constant (op0, - INTVAL (op1));
6087 else
6088 return gen_rtx (MINUS, mode, op0, op1);
6090 /* Convert A - const to A + (-const). */
6091 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6093 tree negated = fold (build1 (NEGATE_EXPR, type,
6094 TREE_OPERAND (exp, 1)));
6096 /* Deal with the case where we can't negate the constant
6097 in TYPE. */
6098 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6100 tree newtype = signed_type (type);
6101 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6102 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6103 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6105 if (! TREE_OVERFLOW (newneg))
6106 return expand_expr (convert (type,
6107 build (PLUS_EXPR, newtype,
6108 newop0, newneg)),
6109 target, tmode, modifier);
6111 else
6113 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6114 goto plus_expr;
6117 this_optab = sub_optab;
6118 goto binop;
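/* Annotation (illustrative sketch, not in the original source): for
   unsigned U, `u - 5' must not be rewritten as u + (-5) inside the
   unsigned type, so the code above redoes the negation in the
   corresponding signed type, in effect

       u - 5  ==>  (unsigned) ((int) u + (-5))

   and falls back to a genuine subtraction only when even the signed
   negation overflows.  */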
6120 case MULT_EXPR:
6121 preexpand_calls (exp);
6122 /* If first operand is constant, swap them.
6123 Thus the following special case checks need only
6124 check the second operand. */
6125 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6127 register tree t1 = TREE_OPERAND (exp, 0);
6128 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6129 TREE_OPERAND (exp, 1) = t1;
6132 /* Attempt to return something suitable for generating an
6133 indexed address, for machines that support that. */
6135 if (modifier == EXPAND_SUM && mode == ptr_mode
6136 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6137 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6139 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6141 /* Apply distributive law if OP0 is x+c. */
6142 if (GET_CODE (op0) == PLUS
6143 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6144 return gen_rtx (PLUS, mode,
6145 gen_rtx (MULT, mode, XEXP (op0, 0),
6146 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6147 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6148 * INTVAL (XEXP (op0, 1))));
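/* Annotation (worked example, not in the original source): if OP0
   expanded to (plus x (const_int 4)) and the multiplier is 3, the
   distributive rule above returns

       (plus (mult x (const_int 3)) (const_int 12))

   i.e. (x + 4) * 3 ==> x*3 + 12, a shape that can feed directly
   into an indexed address.  */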
6150 if (GET_CODE (op0) != REG)
6151 op0 = force_operand (op0, NULL_RTX);
6152 if (GET_CODE (op0) != REG)
6153 op0 = copy_to_mode_reg (mode, op0);
6155 return gen_rtx (MULT, mode, op0,
6156 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6159 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6160 subtarget = 0;
6162 /* Check for multiplying things that have been extended
6163 from a narrower type. If this machine supports multiplying
6164 in that narrower type with a result in the desired type,
6165 do it that way, and avoid the explicit type-conversion. */
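/* Annotation (illustrative sketch, not in the original source): with
   A and B of type short on a machine providing a 16x16->32 multiply
   (a mulhisi3-style pattern), the tree for `(int) a * (int) b'
   passes the test below; both operands are then expanded in HImode
   and one widening multiply yields the SImode product, avoiding two
   explicit sign extensions.  */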
6166 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6167 && TREE_CODE (type) == INTEGER_TYPE
6168 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6169 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6170 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6171 && int_fits_type_p (TREE_OPERAND (exp, 1),
6172 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6173 /* Don't use a widening multiply if a shift will do. */
6174 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6175 > HOST_BITS_PER_WIDE_INT)
6176 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6177 ||
6178 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6179 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6180 ==
6181 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6182 /* If both operands are extended, they must either both
6183 be zero-extended or both be sign-extended. */
6184 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6185 ==
6186 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6188 enum machine_mode innermode
6189 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6190 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6191 ? smul_widen_optab : umul_widen_optab);
6192 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6193 ? umul_widen_optab : smul_widen_optab);
6194 if (mode == GET_MODE_WIDER_MODE (innermode))
6196 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6198 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6199 NULL_RTX, VOIDmode, 0);
6200 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6201 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6202 VOIDmode, 0);
6203 else
6204 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6205 NULL_RTX, VOIDmode, 0);
6206 goto binop2;
6208 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6209 && innermode == word_mode)
6211 rtx htem;
6212 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6213 NULL_RTX, VOIDmode, 0);
6214 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6215 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6216 VOIDmode, 0);
6217 else
6218 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6219 NULL_RTX, VOIDmode, 0);
6220 temp = expand_binop (mode, other_optab, op0, op1, target,
6221 unsignedp, OPTAB_LIB_WIDEN);
6222 htem = expand_mult_highpart_adjust (innermode,
6223 gen_highpart (innermode, temp),
6224 op0, op1,
6225 gen_highpart (innermode, temp),
6226 unsignedp);
6227 emit_move_insn (gen_highpart (innermode, temp), htem);
6228 return temp;
6232 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6233 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6234 return expand_mult (mode, op0, op1, target, unsignedp);
6236 case TRUNC_DIV_EXPR:
6237 case FLOOR_DIV_EXPR:
6238 case CEIL_DIV_EXPR:
6239 case ROUND_DIV_EXPR:
6240 case EXACT_DIV_EXPR:
6241 preexpand_calls (exp);
6242 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6243 subtarget = 0;
6244 /* Possible optimization: compute the dividend with EXPAND_SUM;
6245 then, if the divisor is constant, we can optimize the case
6246 where some terms of the dividend have coefficients divisible by it. */
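/* Annotation (worked example, not in the original source): the
   optimization contemplated above would let (8*x + 24) / 8 be
   rewritten as x + 3 without materializing the full dividend, since
   every term has a coefficient divisible by the constant divisor.  */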
6247 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6248 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6249 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6251 case RDIV_EXPR:
6252 this_optab = flodiv_optab;
6253 goto binop;
6255 case TRUNC_MOD_EXPR:
6256 case FLOOR_MOD_EXPR:
6257 case CEIL_MOD_EXPR:
6258 case ROUND_MOD_EXPR:
6259 preexpand_calls (exp);
6260 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6261 subtarget = 0;
6262 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6263 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6264 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6266 case FIX_ROUND_EXPR:
6267 case FIX_FLOOR_EXPR:
6268 case FIX_CEIL_EXPR:
6269 abort (); /* Not used for C. */
6271 case FIX_TRUNC_EXPR:
6272 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6273 if (target == 0)
6274 target = gen_reg_rtx (mode);
6275 expand_fix (target, op0, unsignedp);
6276 return target;
6278 case FLOAT_EXPR:
6279 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6280 if (target == 0)
6281 target = gen_reg_rtx (mode);
6282 /* expand_float can't figure out what to do if FROM has VOIDmode.
6283 So give it the correct mode. With -O, cse will optimize this. */
6284 if (GET_MODE (op0) == VOIDmode)
6285 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6286 op0);
6287 expand_float (target, op0,
6288 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6289 return target;
6291 case NEGATE_EXPR:
6292 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6293 temp = expand_unop (mode, neg_optab, op0, target, 0);
6294 if (temp == 0)
6295 abort ();
6296 return temp;
6298 case ABS_EXPR:
6299 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6301 /* Handle complex values specially. */
6302 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6303 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6304 return expand_complex_abs (mode, op0, target, unsignedp);
6306 /* Unsigned abs is simply the operand. Testing here means we don't
6307 risk generating incorrect code below. */
6308 if (TREE_UNSIGNED (type))
6309 return op0;
6311 return expand_abs (mode, op0, target, unsignedp,
6312 safe_from_p (target, TREE_OPERAND (exp, 0)));
6314 case MAX_EXPR:
6315 case MIN_EXPR:
6316 target = original_target;
6317 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6318 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6319 || GET_MODE (target) != mode
6320 || (GET_CODE (target) == REG
6321 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6322 target = gen_reg_rtx (mode);
6323 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6324 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6326 /* First try to do it with a special MIN or MAX instruction.
6327 If that does not win, use a conditional jump to select the proper
6328 value. */
6329 this_optab = (TREE_UNSIGNED (type)
6330 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6331 : (code == MIN_EXPR ? smin_optab : smax_optab));
6333 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6334 OPTAB_WIDEN);
6335 if (temp != 0)
6336 return temp;
6338 /* At this point, a MEM target is no longer useful; we will get better
6339 code without it. */
6341 if (GET_CODE (target) == MEM)
6342 target = gen_reg_rtx (mode);
6344 if (target != op0)
6345 emit_move_insn (target, op0);
6347 op0 = gen_label_rtx ();
6349 /* If this mode is an integer too wide to compare properly,
6350 compare word by word. Rely on cse to optimize constant cases. */
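/* Annotation (illustrative sketch, not in the original source): a
   DImode MIN/MAX on a 32-bit host has no direct compare instruction,
   so do_jump_by_parts_greater_rtx compares the high words first and
   the low words only when the high words are equal, jumping to OP0
   when the target already holds the desired extremum.  */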
6351 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6353 if (code == MAX_EXPR)
6354 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6355 target, op1, NULL_RTX, op0);
6356 else
6357 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6358 op1, target, NULL_RTX, op0);
6359 emit_move_insn (target, op1);
6361 else
6363 if (code == MAX_EXPR)
6364 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6365 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6366 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6367 else
6368 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6369 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6370 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6371 if (temp == const0_rtx)
6372 emit_move_insn (target, op1);
6373 else if (temp != const_true_rtx)
6375 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6376 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6377 else
6378 abort ();
6379 emit_move_insn (target, op1);
6382 emit_label (op0);
6383 return target;
6385 case BIT_NOT_EXPR:
6386 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6387 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6388 if (temp == 0)
6389 abort ();
6390 return temp;
6392 case FFS_EXPR:
6393 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6394 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6395 if (temp == 0)
6396 abort ();
6397 return temp;
6399 /* ??? Can optimize bitwise operations with one arg constant.
6400 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6401 and (a bitwise1 b) bitwise2 b (etc)
6402 but that is probably not worthwhile. */
6404 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6405 boolean values when we want in all cases to compute both of them. In
6406 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6407 as actual zero-or-1 values and then bitwise anding. In cases where
6408 there cannot be any side effects, better code would be made by
6409 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6410 how to recognize those cases. */
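/* Annotation (illustrative sketch, not in the original source): in
   `f () && g ()' (TRUTH_ANDIF_EXPR) g must not run when f yields 0,
   which forces a branch; a TRUTH_AND_EXPR instead evaluates both
   operands to 0 or 1 and ANDs them, which is branch-free and usually
   faster when the operands are cheap and free of side effects.  */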
6412 case TRUTH_AND_EXPR:
6413 case BIT_AND_EXPR:
6414 this_optab = and_optab;
6415 goto binop;
6417 case TRUTH_OR_EXPR:
6418 case BIT_IOR_EXPR:
6419 this_optab = ior_optab;
6420 goto binop;
6422 case TRUTH_XOR_EXPR:
6423 case BIT_XOR_EXPR:
6424 this_optab = xor_optab;
6425 goto binop;
6427 case LSHIFT_EXPR:
6428 case RSHIFT_EXPR:
6429 case LROTATE_EXPR:
6430 case RROTATE_EXPR:
6431 preexpand_calls (exp);
6432 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6433 subtarget = 0;
6434 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6435 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6436 unsignedp);
6438 /* Could determine the answer when only additive constants differ. Also,
6439 the addition of one can be handled by changing the condition. */
6440 case LT_EXPR:
6441 case LE_EXPR:
6442 case GT_EXPR:
6443 case GE_EXPR:
6444 case EQ_EXPR:
6445 case NE_EXPR:
6446 preexpand_calls (exp);
6447 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6448 if (temp != 0)
6449 return temp;
6451 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6452 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6453 && original_target
6454 && GET_CODE (original_target) == REG
6455 && (GET_MODE (original_target)
6456 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6458 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6459 VOIDmode, 0);
6461 if (temp != original_target)
6462 temp = copy_to_reg (temp);
6464 op1 = gen_label_rtx ();
6465 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6466 GET_MODE (temp), unsignedp, 0);
6467 emit_jump_insn (gen_beq (op1));
6468 emit_move_insn (temp, const1_rtx);
6469 emit_label (op1);
6470 return temp;
6473 /* If no set-flag instruction, must generate a conditional
6474 store into a temporary variable. Drop through
6475 and handle this like && and ||. */
6477 case TRUTH_ANDIF_EXPR:
6478 case TRUTH_ORIF_EXPR:
6479 if (! ignore
6480 && (target == 0 || ! safe_from_p (target, exp)
6481 /* Make sure we don't have a hard reg (such as function's return
6482 value) live across basic blocks, if not optimizing. */
6483 || (!optimize && GET_CODE (target) == REG
6484 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6485 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6487 if (target)
6488 emit_clr_insn (target);
6490 op1 = gen_label_rtx ();
6491 jumpifnot (exp, op1);
6493 if (target)
6494 emit_0_to_1_insn (target);
6496 emit_label (op1);
6497 return ignore ? const0_rtx : target;
6499 case TRUTH_NOT_EXPR:
6500 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6501 /* The parser is careful to generate TRUTH_NOT_EXPR
6502 only with operands that are always zero or one. */
6503 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6504 target, 1, OPTAB_LIB_WIDEN);
6505 if (temp == 0)
6506 abort ();
6507 return temp;
6509 case COMPOUND_EXPR:
6510 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6511 emit_queue ();
6512 return expand_expr (TREE_OPERAND (exp, 1),
6513 (ignore ? const0_rtx : target),
6514 VOIDmode, 0);
6516 case COND_EXPR:
6517 /* If we would have a "singleton" (see below) were it not for a
6518 conversion in each arm, bring that conversion back out. */
6519 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6520 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6521 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6522 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6524 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6525 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6527 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6528 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6529 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6530 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6531 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6532 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6533 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6534 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6535 return expand_expr (build1 (NOP_EXPR, type,
6536 build (COND_EXPR, TREE_TYPE (true),
6537 TREE_OPERAND (exp, 0),
6538 true, false)),
6539 target, tmode, modifier);
6543 rtx flag = NULL_RTX;
6544 tree left_cleanups = NULL_TREE;
6545 tree right_cleanups = NULL_TREE;
6547 /* Used to save a pointer to the place to put the setting of
6548 the flag that indicates if this side of the conditional was
6549 taken. We backpatch the code if we find out later that we
6550 have any conditional cleanups that need to be performed. */
6551 rtx dest_right_flag = NULL_RTX;
6552 rtx dest_left_flag = NULL_RTX;
6554 /* Note that COND_EXPRs whose type is a structure or union
6555 are required to be constructed to contain assignments of
6556 a temporary variable, so that we can evaluate them here
6557 for side effect only. If type is void, we must do likewise. */
6559 /* If an arm of the branch requires a cleanup,
6560 only that cleanup is performed. */
6562 tree singleton = 0;
6563 tree binary_op = 0, unary_op = 0;
6564 tree old_cleanups = cleanups_this_call;
6566 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6567 convert it to our mode, if necessary. */
6568 if (integer_onep (TREE_OPERAND (exp, 1))
6569 && integer_zerop (TREE_OPERAND (exp, 2))
6570 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6572 if (ignore)
6574 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6575 modifier);
6576 return const0_rtx;
6579 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6580 if (GET_MODE (op0) == mode)
6581 return op0;
6583 if (target == 0)
6584 target = gen_reg_rtx (mode);
6585 convert_move (target, op0, unsignedp);
6586 return target;
6589 /* Check for X ? A + B : A. If we have this, we can copy A to the
6590 output and conditionally add B. Similarly for unary operations.
6591 Don't do this if X has side-effects because those side effects
6592 might affect A or B and the "?" operation is a sequence point in
6593 ANSI. (operand_equal_p tests for side effects.) */
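/* Annotation (illustrative sketch, not in the original source): for
   `x ? a + b : a' the tests below record SINGLETON = a and
   BINARY_OP = a + b; the expansion can then copy A into the target
   and conditionally add B, rather than emitting two full arms.  The
   tests are skipped when X has side effects, since evaluating X
   could change A or B.  */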
6595 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6596 && operand_equal_p (TREE_OPERAND (exp, 2),
6597 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6598 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6599 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6600 && operand_equal_p (TREE_OPERAND (exp, 1),
6601 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6602 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6603 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6604 && operand_equal_p (TREE_OPERAND (exp, 2),
6605 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6606 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6607 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6608 && operand_equal_p (TREE_OPERAND (exp, 1),
6609 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6610 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6612 /* If we are not to produce a result, we have no target. Otherwise,
6613 if a target was specified use it; it will not be used as an
6614 intermediate target unless it is safe. If no target, use a
6615 temporary. */
6617 if (ignore)
6618 temp = 0;
6619 else if (original_target
6620 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6621 || (singleton && GET_CODE (original_target) == REG
6622 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6623 && original_target == var_rtx (singleton)))
6624 && GET_MODE (original_target) == mode
6625 && ! (GET_CODE (original_target) == MEM
6626 && MEM_VOLATILE_P (original_target)))
6627 temp = original_target;
6628 else if (TREE_ADDRESSABLE (type))
6629 abort ();
6630 else
6631 temp = assign_temp (type, 0, 0, 1);
6633 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6634 do the test of X as a store-flag operation, do this as
6635 A + ((X != 0) << log C). Similarly for other simple binary
6636 operators. Only do for C == 1 if BRANCH_COST is low. */
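/* Annotation (worked example, not in the original source): for
   `x ? a + 4 : a' with X a comparison, do_store_flag yields (X != 0)
   as 0 or 1 and the whole conditional becomes the branch-free

       a + ((x != 0) << 2)

   since log2 (4) == 2.  When C == 1 the shift disappears entirely,
   which is why that case is accepted even when BRANCH_COST is low.  */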
6637 if (temp && singleton && binary_op
6638 && (TREE_CODE (binary_op) == PLUS_EXPR
6639 || TREE_CODE (binary_op) == MINUS_EXPR
6640 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6641 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6642 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6643 : integer_onep (TREE_OPERAND (binary_op, 1)))
6644 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6646 rtx result;
6647 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6648 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6649 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6650 : xor_optab);
6652 /* If we had X ? A : A + 1, do this as A + (X == 0).
6654 We have to invert the truth value here and then put it
6655 back later if do_store_flag fails. We cannot simply copy
6656 TREE_OPERAND (exp, 0) to another variable and modify that
6657 because invert_truthvalue can modify the tree pointed to
6658 by its argument. */
6659 if (singleton == TREE_OPERAND (exp, 1))
6660 TREE_OPERAND (exp, 0)
6661 = invert_truthvalue (TREE_OPERAND (exp, 0));
6663 result = do_store_flag (TREE_OPERAND (exp, 0),
6664 (safe_from_p (temp, singleton)
6665 ? temp : NULL_RTX),
6666 mode, BRANCH_COST <= 1);
6668 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6669 result = expand_shift (LSHIFT_EXPR, mode, result,
6670 build_int_2 (tree_log2
6671 (TREE_OPERAND
6672 (binary_op, 1)),
6673 0),
6674 (safe_from_p (temp, singleton)
6675 ? temp : NULL_RTX), 0);
6677 if (result)
6679 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6680 return expand_binop (mode, boptab, op1, result, temp,
6681 unsignedp, OPTAB_LIB_WIDEN);
6683 else if (singleton == TREE_OPERAND (exp, 1))
6684 TREE_OPERAND (exp, 0)
6685 = invert_truthvalue (TREE_OPERAND (exp, 0));
6688 do_pending_stack_adjust ();
6689 NO_DEFER_POP;
6690 op0 = gen_label_rtx ();
6692 flag = gen_reg_rtx (word_mode);
6693 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6695 if (temp != 0)
6697 /* If the target conflicts with the other operand of the
6698 binary op, we can't use it. Also, we can't use the target
6699 if it is a hard register, because evaluating the condition
6700 might clobber it. */
6701 if ((binary_op
6702 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6703 || (GET_CODE (temp) == REG
6704 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6705 temp = gen_reg_rtx (mode);
6706 store_expr (singleton, temp, 0);
6708 else
6709 expand_expr (singleton,
6710 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6711 dest_left_flag = get_last_insn ();
6712 if (singleton == TREE_OPERAND (exp, 1))
6713 jumpif (TREE_OPERAND (exp, 0), op0);
6714 else
6715 jumpifnot (TREE_OPERAND (exp, 0), op0);
6717 /* Allows cleanups up to here. */
6718 old_cleanups = cleanups_this_call;
6719 if (binary_op && temp == 0)
6720 /* Just touch the other operand. */
6721 expand_expr (TREE_OPERAND (binary_op, 1),
6722 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6723 else if (binary_op)
6724 store_expr (build (TREE_CODE (binary_op), type,
6725 make_tree (type, temp),
6726 TREE_OPERAND (binary_op, 1)),
6727 temp, 0);
6728 else
6729 store_expr (build1 (TREE_CODE (unary_op), type,
6730 make_tree (type, temp)),
6731 temp, 0);
6732 op1 = op0;
6733 dest_right_flag = get_last_insn ();
6735 #if 0
6736 /* This is now done in jump.c and is better done there because it
6737 produces shorter register lifetimes. */
6739 /* Check for both possibilities, either constants or variables
6740 in registers (but not the same as the target!). If so, we can
6741 save branches by assigning one, branching, and assigning the
6742 other. */
6743 else if (temp && GET_MODE (temp) != BLKmode
6744 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6745 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6746 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6747 && DECL_RTL (TREE_OPERAND (exp, 1))
6748 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6749 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6750 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6751 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6752 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6753 && DECL_RTL (TREE_OPERAND (exp, 2))
6754 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6755 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6757 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6758 temp = gen_reg_rtx (mode);
6759 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6760 dest_left_flag = get_last_insn ();
6761 jumpifnot (TREE_OPERAND (exp, 0), op0);
6763 /* Allows cleanups up to here. */
6764 old_cleanups = cleanups_this_call;
6765 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6766 op1 = op0;
6767 dest_right_flag = get_last_insn ();
6769 #endif
6770 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6771 comparison operator. If we have one of these cases, set the
6772 output to A, branch on A (cse will merge these two references),
6773 then set the output to FOO. */
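/* Annotation (illustrative sketch, not in the original source): for
   `x != 0 ? x : 5' the code below stores X into the target, branches
   on the very same X (cse merges the two references), and stores 5
   only on the fall-through path, so a single jump suffices.  */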
6774 else if (temp
6775 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6776 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6777 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6778 TREE_OPERAND (exp, 1), 0)
6779 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6780 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6782 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6783 temp = gen_reg_rtx (mode);
6784 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6785 dest_left_flag = get_last_insn ();
6786 jumpif (TREE_OPERAND (exp, 0), op0);
6788 /* Allows cleanups up to here. */
6789 old_cleanups = cleanups_this_call;
6790 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6791 op1 = op0;
6792 dest_right_flag = get_last_insn ();
6794 else if (temp
6795 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6796 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6797 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6798 TREE_OPERAND (exp, 2), 0)
6799 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6800 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6802 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6803 temp = gen_reg_rtx (mode);
6804 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6805 dest_left_flag = get_last_insn ();
6806 jumpifnot (TREE_OPERAND (exp, 0), op0);
6808 /* Allows cleanups up to here. */
6809 old_cleanups = cleanups_this_call;
6810 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6811 op1 = op0;
6812 dest_right_flag = get_last_insn ();
6814 else
6816 op1 = gen_label_rtx ();
6817 jumpifnot (TREE_OPERAND (exp, 0), op0);
6819 /* Allows cleanups up to here. */
6820 old_cleanups = cleanups_this_call;
6821 if (temp != 0)
6822 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6823 else
6824 expand_expr (TREE_OPERAND (exp, 1),
6825 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6826 dest_left_flag = get_last_insn ();
6828 /* Handle conditional cleanups, if any. */
6829 left_cleanups = defer_cleanups_to (old_cleanups);
6831 emit_queue ();
6832 emit_jump_insn (gen_jump (op1));
6833 emit_barrier ();
6834 emit_label (op0);
6835 if (temp != 0)
6836 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6837 else
6838 expand_expr (TREE_OPERAND (exp, 2),
6839 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6840 dest_right_flag = get_last_insn ();
6843 /* Handle conditional cleanups, if any. */
6844 right_cleanups = defer_cleanups_to (old_cleanups);
6846 emit_queue ();
6847 emit_label (op1);
6848 OK_DEFER_POP;
6850 /* Add back in any conditional cleanups. */
6851 if (left_cleanups || right_cleanups)
6853 tree new_cleanups;
6854 tree cond;
6855 rtx last;
6857 /* Now that we know that a flag is needed, go back and add in the
6858 setting of the flag. */
6860 /* Do the left side flag. */
6861 last = get_last_insn ();
6862 /* Flag left cleanups as needed. */
6863 emit_move_insn (flag, const1_rtx);
6864 /* ??? deprecated, use sequences instead. */
6865 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6867 /* Do the right side flag. */
6868 last = get_last_insn ();
6869 /* Flag right cleanups as needed. */
6870 emit_move_insn (flag, const0_rtx);
6871 /* ??? deprecated, use sequences instead. */
6872 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6874 /* All cleanups must be on the function_obstack. */
6875 push_obstacks_nochange ();
6876 resume_temporary_allocation ();
6878 /* Convert FLAG, which is an rtx, into a tree. */
6879 cond = make_node (RTL_EXPR);
6880 TREE_TYPE (cond) = integer_type_node;
6881 RTL_EXPR_RTL (cond) = flag;
6882 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6883 cond = save_expr (cond);
6885 if (! left_cleanups)
6886 left_cleanups = integer_zero_node;
6887 if (! right_cleanups)
6888 right_cleanups = integer_zero_node;
6889 new_cleanups = build (COND_EXPR, void_type_node,
6890 truthvalue_conversion (cond),
6891 left_cleanups, right_cleanups);
6892 new_cleanups = fold (new_cleanups);
6894 pop_obstacks ();
6896 /* Now add in the conditionalized cleanups. */
6897 cleanups_this_call
6898 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6899 expand_eh_region_start ();
6901 return temp;
6904 case TARGET_EXPR:
6906 /* Something needs to be initialized, but we didn't know
6907 where that thing was when building the tree. For example,
6908 it could be the return value of a function, or a parameter
6909 to a function which is laid out on the stack, or a temporary
6910 variable which must be passed by reference.
6912 We guarantee that the expression will either be constructed
6913 or copied into our original target. */
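/* Annotation (illustrative sketch, not in the original source): a
   typical TARGET_EXPR comes from C++ code such as `S s = f ();'
   where f returns a class object by value: SLOT stands for the
   storage of s, and operand 1 is expanded straight into that slot so
   no separate temporary and copy are needed.  */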
6915 tree slot = TREE_OPERAND (exp, 0);
6916 tree cleanups = NULL_TREE;
6917 tree exp1;
6918 rtx temp;
6920 if (TREE_CODE (slot) != VAR_DECL)
6921 abort ();
6923 if (! ignore)
6924 target = original_target;
6926 if (target == 0)
6928 if (DECL_RTL (slot) != 0)
6930 target = DECL_RTL (slot);
6931 /* If we have already expanded the slot, don't do
6932 it again. (mrs) */
6933 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6934 return target;
6936 else
6938 target = assign_temp (type, 2, 1, 1);
6939 /* All temp slots at this level must not conflict. */
6940 preserve_temp_slots (target);
6941 DECL_RTL (slot) = target;
6943 /* Since SLOT is not known to the called function
6944 to belong to its stack frame, we must build an explicit
6945 cleanup. This case occurs when we must build up a reference
6946 to pass as an argument. In this case,
6947 it is very likely that such a reference need not be
6948 built here. */
6950 if (TREE_OPERAND (exp, 2) == 0)
6951 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6952 cleanups = TREE_OPERAND (exp, 2);
6955 else
6957 /* This case does occur when expanding a parameter which
6958 needs to be constructed on the stack. The target
6959 is the actual stack address that we want to initialize.
6960 The function we call will perform the cleanup in this case. */
6962 /* If we have already assigned it space, use that space,
6963 not the target that we were passed in, as our target
6964 parameter is only a hint. */
6965 if (DECL_RTL (slot) != 0)
6967 target = DECL_RTL (slot);
6968 /* If we have already expanded the slot, don't do
6969 it again. (mrs) */
6970 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6971 return target;
6974 DECL_RTL (slot) = target;
6977 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6978 /* Mark it as expanded. */
6979 TREE_OPERAND (exp, 1) = NULL_TREE;
6981 store_expr (exp1, target, 0);
6983 if (cleanups)
6985 cleanups_this_call = tree_cons (NULL_TREE,
6986 cleanups,
6987 cleanups_this_call);
6988 expand_eh_region_start ();
6991 return target;
6994 case INIT_EXPR:
6996 tree lhs = TREE_OPERAND (exp, 0);
6997 tree rhs = TREE_OPERAND (exp, 1);
6998 tree noncopied_parts = 0;
6999 tree lhs_type = TREE_TYPE (lhs);
7001 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7002 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7003 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7004 TYPE_NONCOPIED_PARTS (lhs_type));
7005 while (noncopied_parts != 0)
7007 expand_assignment (TREE_VALUE (noncopied_parts),
7008 TREE_PURPOSE (noncopied_parts), 0, 0);
7009 noncopied_parts = TREE_CHAIN (noncopied_parts);
7011 return temp;
7014 case MODIFY_EXPR:
7016 /* If lhs is complex, expand calls in rhs before computing it.
7017 That's so we don't compute a pointer and save it over a call.
7018 If lhs is simple, compute it first so we can give it as a
7019 target if the rhs is just a call. This avoids an extra temp and copy,
7020 and prevents a partial subsumption which makes bad code.
7021 Actually we could treat component_ref's of vars like vars. */
7023 tree lhs = TREE_OPERAND (exp, 0);
7024 tree rhs = TREE_OPERAND (exp, 1);
7025 tree noncopied_parts = 0;
7026 tree lhs_type = TREE_TYPE (lhs);
7028 temp = 0;
7030 if (TREE_CODE (lhs) != VAR_DECL
7031 && TREE_CODE (lhs) != RESULT_DECL
7032 && TREE_CODE (lhs) != PARM_DECL)
7033 preexpand_calls (exp);
7035 /* Check for |= or &= of a bitfield of size one into another bitfield
7036 of size 1. In this case, (unless we need the result of the
7037 assignment) we can do this more efficiently with a
7038 test followed by an assignment, if necessary.
7040 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7041 things change so we do, this code should be enhanced to
7042 support it. */
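/* Annotation (illustrative sketch, not in the original source): for
   1-bit fields, `s.a |= s.b;' needs no read-modify-write of s.a:
   when s.b is 0 the store would change nothing, so the code below
   jumps around an unconditional `s.a = 1' (and, dually, around
   `s.a = 0' for &=).  */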
7043 if (ignore
7044 && TREE_CODE (lhs) == COMPONENT_REF
7045 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7046 || TREE_CODE (rhs) == BIT_AND_EXPR)
7047 && TREE_OPERAND (rhs, 0) == lhs
7048 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7049 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7050 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7052 rtx label = gen_label_rtx ();
7054 do_jump (TREE_OPERAND (rhs, 1),
7055 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7056 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7057 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7058 (TREE_CODE (rhs) == BIT_IOR_EXPR
7059 ? integer_one_node
7060 : integer_zero_node)),
7061 0, 0);
7062 do_pending_stack_adjust ();
7063 emit_label (label);
7064 return const0_rtx;
7067 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7068 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7069 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7070 TYPE_NONCOPIED_PARTS (lhs_type));
7072 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7073 while (noncopied_parts != 0)
7075 expand_assignment (TREE_PURPOSE (noncopied_parts),
7076 TREE_VALUE (noncopied_parts), 0, 0);
7077 noncopied_parts = TREE_CHAIN (noncopied_parts);
7079 return temp;
7082 case PREINCREMENT_EXPR:
7083 case PREDECREMENT_EXPR:
7084 return expand_increment (exp, 0, ignore);
7086 case POSTINCREMENT_EXPR:
7087 case POSTDECREMENT_EXPR:
7088 /* Faster to treat as pre-increment if result is not used. */
7089 return expand_increment (exp, ! ignore, ignore);
7091 case ADDR_EXPR:
7092 /* If nonzero, TEMP will be set to the address of something that might
7093 be a MEM corresponding to a stack slot. */
7094 temp = 0;
7096 /* Are we taking the address of a nested function? */
7097 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7098 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7099 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
7101 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7102 op0 = force_operand (op0, target);
7104 /* If we are taking the address of something erroneous, just
7105 return a zero. */
7106 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7107 return const0_rtx;
7108 else
7110 /* We make sure to pass const0_rtx down if we came in with
7111 ignore set, to avoid doing the cleanups twice. */
7112 op0 = expand_expr (TREE_OPERAND (exp, 0),
7113 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7114 (modifier == EXPAND_INITIALIZER
7115 ? modifier : EXPAND_CONST_ADDRESS));
7117 /* If we are going to ignore the result, OP0 will have been set
7118 to const0_rtx, so just return it. Don't get confused and
7119 think we are taking the address of the constant. */
7120 if (ignore)
7121 return op0;
7123 op0 = protect_from_queue (op0, 0);
7125 /* We would like the object in memory. If it is a constant,
7126 we can have it be statically allocated into memory. For
7127 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
7128 memory and store the value into it. */
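/* Annotation (illustrative sketch, not in the original source): for
   `&x' where X was expanded to a pseudo register, the REG/SUBREG/
   CONCAT branch below allocates a stack temporary, copies the
   register into it, and the temporary's address becomes the result;
   a CONSTANT_P operand is instead placed in the constant pool by
   force_const_mem.  */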
7130 if (CONSTANT_P (op0))
7131 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7132 op0);
7133 else if (GET_CODE (op0) == MEM)
7135 mark_temp_addr_taken (op0);
7136 temp = XEXP (op0, 0);
7139 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7140 || GET_CODE (op0) == CONCAT)
7142 /* If this object is in a register, it must not
7143 be BLKmode. */
7144 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7145 rtx memloc = assign_temp (inner_type, 1, 1, 1);
7147 mark_temp_addr_taken (memloc);
7148 emit_move_insn (memloc, op0);
7149 op0 = memloc;
7152 if (GET_CODE (op0) != MEM)
7153 abort ();
7155 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7157 temp = XEXP (op0, 0);
7158 #ifdef POINTERS_EXTEND_UNSIGNED
7159 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7160 && mode == ptr_mode)
7161 temp = convert_memory_address (ptr_mode, temp);
7162 #endif
7163 return temp;
7166 op0 = force_operand (XEXP (op0, 0), target);
7169 if (flag_force_addr && GET_CODE (op0) != REG)
7170 op0 = force_reg (Pmode, op0);
7172 if (GET_CODE (op0) == REG
7173 && ! REG_USERVAR_P (op0))
7174 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7176 /* If we might have had a temp slot, add an equivalent address
7177 for it. */
7178 if (temp != 0)
7179 update_temp_slot_address (temp, op0);
7181 #ifdef POINTERS_EXTEND_UNSIGNED
7182 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7183 && mode == ptr_mode)
7184 op0 = convert_memory_address (ptr_mode, op0);
7185 #endif
7187 return op0;
7189 case ENTRY_VALUE_EXPR:
7190 abort ();
7192 /* COMPLEX type for Extended Pascal & Fortran */
7193 case COMPLEX_EXPR:
7195 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7196 rtx insns;
7198 /* Get the rtx for each operand. */
7199 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7200 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7202 if (! target)
7203 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7205 start_sequence ();
7207 /* Move the real (op0) and imaginary (op1) parts to their location. */
7208 emit_move_insn (gen_realpart (mode, target), op0);
7209 emit_move_insn (gen_imagpart (mode, target), op1);
7211 insns = get_insns ();
7212 end_sequence ();
7214 /* Complex construction should appear as a single unit. */
7215 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7216 each with a separate pseudo as destination.
7217 It's not correct for flow to treat them as a unit. */
7218 if (GET_CODE (target) != CONCAT)
7219 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7220 else
7221 emit_insns (insns);
7223 return target;
7226 case REALPART_EXPR:
7227 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7228 return gen_realpart (mode, op0);
7230 case IMAGPART_EXPR:
7231 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7232 return gen_imagpart (mode, op0);
7234 case CONJ_EXPR:
7236 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7237 rtx imag_t;
7238 rtx insns;
7240 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7242 if (! target)
7243 target = gen_reg_rtx (mode);
7245 start_sequence ();
7247 /* Store the realpart and the negated imagpart to target. */
7248 emit_move_insn (gen_realpart (partmode, target),
7249 gen_realpart (partmode, op0));
7251 imag_t = gen_imagpart (partmode, target);
7252 temp = expand_unop (partmode, neg_optab,
7253 gen_imagpart (partmode, op0), imag_t, 0);
7254 if (temp != imag_t)
7255 emit_move_insn (imag_t, temp);
7257 insns = get_insns ();
7258 end_sequence ();
7260 /* Conjugate should appear as a single unit.
7261 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7262 each with a separate pseudo as destination.
7263 It's not correct for flow to treat them as a unit. */
7264 if (GET_CODE (target) != CONCAT)
7265 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7266 else
7267 emit_insns (insns);
7269 return target;
7272 case ERROR_MARK:
7273 op0 = CONST0_RTX (tmode);
7274 if (op0 != 0)
7275 return op0;
7276 return const0_rtx;
7278 default:
7279 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7282 /* Here to do an ordinary binary operator, generating an instruction
7283 from the optab already placed in `this_optab'. */
7284 binop:
7285 preexpand_calls (exp);
7286 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7287 subtarget = 0;
7288 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7289 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7290 binop2:
7291 temp = expand_binop (mode, this_optab, op0, op1, target,
7292 unsignedp, OPTAB_LIB_WIDEN);
7293 if (temp == 0)
7294 abort ();
7295 return temp;
7299 /* Emit bytecode to evaluate the given expression EXP to the stack. */
7301 void
7302 bc_expand_expr (exp)
7303 tree exp;
7305 enum tree_code code;
7306 tree type, arg0;
7307 rtx r;
7308 struct binary_operator *binoptab;
7309 struct unary_operator *unoptab;
7310 struct increment_operator *incroptab;
7311 struct bc_label *lab, *lab1;
7312 enum bytecode_opcode opcode;
7315 code = TREE_CODE (exp);
7317 switch (code)
7319 case PARM_DECL:
7321 if (DECL_RTL (exp) == 0)
7323 error_with_decl (exp, "prior parameter's size depends on `%s'");
7324 return;
7327 bc_load_parmaddr (DECL_RTL (exp));
7328 bc_load_memory (TREE_TYPE (exp), exp);
7330 return;
7332 case VAR_DECL:
7334 if (DECL_RTL (exp) == 0)
7335 abort ();
7337 #if 0
7338 if (BYTECODE_LABEL (DECL_RTL (exp)))
7339 bc_load_externaddr (DECL_RTL (exp));
7340 else
7341 bc_load_localaddr (DECL_RTL (exp));
7342 #endif
7343 if (TREE_PUBLIC (exp))
7344 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7345 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7346 else
7347 bc_load_localaddr (DECL_RTL (exp));
7349 bc_load_memory (TREE_TYPE (exp), exp);
7350 return;
7352 case INTEGER_CST:
7354 #ifdef DEBUG_PRINT_CODE
7355 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7356 #endif
7357 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7358 ? SImode
7359 : TYPE_MODE (TREE_TYPE (exp)))],
7360 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
7361 return;
7363 case REAL_CST:
7365 #if 0
7366 #ifdef DEBUG_PRINT_CODE
7367 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7368 #endif
7369 /* FIX THIS: find a better way to pass real_cst's. -bson */
7370 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7371 (double) TREE_REAL_CST (exp));
7372 #else
7373 abort ();
7374 #endif
7376 return;
7378 case CALL_EXPR:
7380 /* We build a call description vector describing the type of
7381 the return value and of the arguments; this call vector,
7382 together with a pointer to a location for the return value
7383 and the base of the argument list, is passed to the low
7384 level machine dependent call subroutine, which is responsible
7385 for putting the arguments wherever real functions expect
7386 them, as well as getting the return value back. */
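/* For example (a sketch, using a hypothetical call `f (a, b)' that
   returns int): the vector built below comes out roughly as

	{ 2, typecode (int), sizeof (int),
	  typecode (a), sizeof (a), typecode (b), sizeof (b) }

   i.e. the argument count, then a type-code/size pair for the return
   value, then one such pair per argument in source order. */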
7388 tree calldesc = 0, arg;
7389 int nargs = 0, i;
7390 rtx retval;
7392 /* Push the evaluated args on the evaluation stack in reverse
7393 order. Also make an entry for each arg in the calldesc
7394 vector while we're at it. */
7396 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7398 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7400 ++nargs;
7401 bc_expand_expr (TREE_VALUE (arg));
7403 calldesc = tree_cons ((tree) 0,
7404 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7405 calldesc);
7406 calldesc = tree_cons ((tree) 0,
7407 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
7408 calldesc);
7411 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7413 /* Allocate a location for the return value and push its
7414 address on the evaluation stack. Also make an entry
7415 at the front of the calldesc for the return value type. */
7417 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7418 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7419 bc_load_localaddr (retval);
7421 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7422 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7424 /* Prepend the argument count. */
7425 calldesc = tree_cons ((tree) 0,
7426 build_int_2 (nargs, 0),
7427 calldesc);
7429 /* Push the address of the call description vector on the stack. */
7430 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7431 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7432 build_index_type (build_int_2 (nargs * 2, 0)));
7433 r = output_constant_def (calldesc);
7434 bc_load_externaddr (r);
7436 /* Push the address of the function to be called. */
7437 bc_expand_expr (TREE_OPERAND (exp, 0));
7439 /* Call the function, popping its address and the calldesc vector
7440 address off the evaluation stack in the process. */
7441 bc_emit_instruction (call);
7443 /* Pop the arguments off the stack. */
7444 bc_adjust_stack (nargs);
7446 /* Load the return value onto the stack. */
7447 bc_load_localaddr (retval);
7448 bc_load_memory (type, TREE_OPERAND (exp, 0));
7450 return;
7452 case SAVE_EXPR:
7454 if (!SAVE_EXPR_RTL (exp))
7456 /* First time around: copy to local variable */
7457 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7458 TYPE_ALIGN (TREE_TYPE(exp)));
7459 bc_expand_expr (TREE_OPERAND (exp, 0));
7460 bc_emit_instruction (duplicate);
7462 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7463 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7465 else
7467 /* Consecutive reference: use saved copy */
7468 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7469 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7471 return;
7473 #if 0
7474 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7475 how are they handled instead? */
7476 case LET_STMT:
7478 TREE_USED (exp) = 1;
7479 bc_expand_expr (STMT_BODY (exp));
7480 return;
7481 #endif
7483 case NOP_EXPR:
7484 case CONVERT_EXPR:
7486 bc_expand_expr (TREE_OPERAND (exp, 0));
7487 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
7488 return;
7490 case MODIFY_EXPR:
7492 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
7493 return;
7495 case ADDR_EXPR:
7497 bc_expand_address (TREE_OPERAND (exp, 0));
7498 return;
7500 case INDIRECT_REF:
7502 bc_expand_expr (TREE_OPERAND (exp, 0));
7503 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7504 return;
7506 case ARRAY_REF:
7508 bc_expand_expr (bc_canonicalize_array_ref (exp));
7509 return;
7511 case COMPONENT_REF:
7513 bc_expand_component_address (exp);
7515 /* If we have a bitfield, generate a proper load */
7516 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
7517 return;
7519 case COMPOUND_EXPR:
7521 bc_expand_expr (TREE_OPERAND (exp, 0));
7522 bc_emit_instruction (drop);
7523 bc_expand_expr (TREE_OPERAND (exp, 1));
7524 return;
7526 case COND_EXPR:
7528 bc_expand_expr (TREE_OPERAND (exp, 0));
7529 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7530 lab = bc_get_bytecode_label ();
7531 bc_emit_bytecode (xjumpifnot);
7532 bc_emit_bytecode_labelref (lab);
7534 #ifdef DEBUG_PRINT_CODE
7535 fputc ('\n', stderr);
7536 #endif
7537 bc_expand_expr (TREE_OPERAND (exp, 1));
7538 lab1 = bc_get_bytecode_label ();
7539 bc_emit_bytecode (jump);
7540 bc_emit_bytecode_labelref (lab1);
7542 #ifdef DEBUG_PRINT_CODE
7543 fputc ('\n', stderr);
7544 #endif
7546 bc_emit_bytecode_labeldef (lab);
7547 bc_expand_expr (TREE_OPERAND (exp, 2));
7548 bc_emit_bytecode_labeldef (lab1);
7549 return;
7551 case TRUTH_ANDIF_EXPR:
7553 opcode = xjumpifnot;
7554 goto andorif;
7556 case TRUTH_ORIF_EXPR:
7558 opcode = xjumpif;
7559 goto andorif;
7561 case PLUS_EXPR:
7563 binoptab = optab_plus_expr;
7564 goto binop;
7566 case MINUS_EXPR:
7568 binoptab = optab_minus_expr;
7569 goto binop;
7571 case MULT_EXPR:
7573 binoptab = optab_mult_expr;
7574 goto binop;
7576 case TRUNC_DIV_EXPR:
7577 case FLOOR_DIV_EXPR:
7578 case CEIL_DIV_EXPR:
7579 case ROUND_DIV_EXPR:
7580 case EXACT_DIV_EXPR:
7582 binoptab = optab_trunc_div_expr;
7583 goto binop;
7585 case TRUNC_MOD_EXPR:
7586 case FLOOR_MOD_EXPR:
7587 case CEIL_MOD_EXPR:
7588 case ROUND_MOD_EXPR:
7590 binoptab = optab_trunc_mod_expr;
7591 goto binop;
7593 case FIX_ROUND_EXPR:
7594 case FIX_FLOOR_EXPR:
7595 case FIX_CEIL_EXPR:
7596 abort (); /* Not used for C. */
7598 case FIX_TRUNC_EXPR:
7599 case FLOAT_EXPR:
7600 case MAX_EXPR:
7601 case MIN_EXPR:
7602 case FFS_EXPR:
7603 case LROTATE_EXPR:
7604 case RROTATE_EXPR:
7605 abort (); /* FIXME */
7607 case RDIV_EXPR:
7609 binoptab = optab_rdiv_expr;
7610 goto binop;
7612 case BIT_AND_EXPR:
7614 binoptab = optab_bit_and_expr;
7615 goto binop;
7617 case BIT_IOR_EXPR:
7619 binoptab = optab_bit_ior_expr;
7620 goto binop;
7622 case BIT_XOR_EXPR:
7624 binoptab = optab_bit_xor_expr;
7625 goto binop;
7627 case LSHIFT_EXPR:
7629 binoptab = optab_lshift_expr;
7630 goto binop;
7632 case RSHIFT_EXPR:
7634 binoptab = optab_rshift_expr;
7635 goto binop;
7637 case TRUTH_AND_EXPR:
7639 binoptab = optab_truth_and_expr;
7640 goto binop;
7642 case TRUTH_OR_EXPR:
7644 binoptab = optab_truth_or_expr;
7645 goto binop;
7647 case LT_EXPR:
7649 binoptab = optab_lt_expr;
7650 goto binop;
7652 case LE_EXPR:
7654 binoptab = optab_le_expr;
7655 goto binop;
7657 case GE_EXPR:
7659 binoptab = optab_ge_expr;
7660 goto binop;
7662 case GT_EXPR:
7664 binoptab = optab_gt_expr;
7665 goto binop;
7667 case EQ_EXPR:
7669 binoptab = optab_eq_expr;
7670 goto binop;
7672 case NE_EXPR:
7674 binoptab = optab_ne_expr;
7675 goto binop;
7677 case NEGATE_EXPR:
7679 unoptab = optab_negate_expr;
7680 goto unop;
7682 case BIT_NOT_EXPR:
7684 unoptab = optab_bit_not_expr;
7685 goto unop;
7687 case TRUTH_NOT_EXPR:
7689 unoptab = optab_truth_not_expr;
7690 goto unop;
7692 case PREDECREMENT_EXPR:
7694 incroptab = optab_predecrement_expr;
7695 goto increment;
7697 case PREINCREMENT_EXPR:
7699 incroptab = optab_preincrement_expr;
7700 goto increment;
7702 case POSTDECREMENT_EXPR:
7704 incroptab = optab_postdecrement_expr;
7705 goto increment;
7707 case POSTINCREMENT_EXPR:
7709 incroptab = optab_postincrement_expr;
7710 goto increment;
7712 case CONSTRUCTOR:
7714 bc_expand_constructor (exp);
7715 return;
7717 case ERROR_MARK:
7718 case RTL_EXPR:
7720 return;
7722 case BIND_EXPR:
7724 tree vars = TREE_OPERAND (exp, 0);
7725 int vars_need_expansion = 0;
7727 /* Need to open a binding contour here because
7728 if there are any cleanups, they must be contained here. */
7729 expand_start_bindings (0);
7731 /* Mark the corresponding BLOCK for output. */
7732 if (TREE_OPERAND (exp, 2) != 0)
7733 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7735 /* If VARS have not yet been expanded, expand them now. */
7736 while (vars)
7738 if (DECL_RTL (vars) == 0)
7740 vars_need_expansion = 1;
7741 expand_decl (vars);
7743 expand_decl_init (vars);
7744 vars = TREE_CHAIN (vars);
7747 bc_expand_expr (TREE_OPERAND (exp, 1));
7749 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7751 return;
7755 abort ();
7757 binop:
7759 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7760 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7761 return;
7764 unop:
7766 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7767 return;
7770 andorif:
7772 bc_expand_expr (TREE_OPERAND (exp, 0));
7773 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7774 lab = bc_get_bytecode_label ();
7776 bc_emit_instruction (duplicate);
7777 bc_emit_bytecode (opcode);
7778 bc_emit_bytecode_labelref (lab);
7780 #ifdef DEBUG_PRINT_CODE
7781 fputc ('\n', stderr);
7782 #endif
7784 bc_emit_instruction (drop);
7786 bc_expand_expr (TREE_OPERAND (exp, 1));
7787 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7788 bc_emit_bytecode_labeldef (lab);
7789 return;
7792 increment:
7794 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7796 /* Push the quantum. */
7797 bc_expand_expr (TREE_OPERAND (exp, 1));
7799 /* Convert it to the lvalue's type. */
7800 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7802 /* Push the address of the lvalue */
7803 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7805 /* Perform actual increment */
7806 bc_expand_increment (incroptab, type);
7807 return;
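/* So for `++x' (a sketch) the net bytecode sequence is: push the
   quantum (the constant 1 for a plain integer increment), convert it
   to x's type, push &x, and finally the increment opcode itself. */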
7810 /* Return the alignment in bits of EXP, a pointer valued expression.
7811 But don't return more than MAX_ALIGN no matter what.
7812 The alignment returned is, by default, the alignment of the thing that
7813 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7815 Otherwise, look at the expression to see if we can do better, i.e., if the
7816 expression is actually pointing at an object whose alignment is tighter. */
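/* For example, given `(char *) &d' with D a double: the pointer type
   itself only promises char alignment, but the NOP_EXPR and ADDR_EXPR
   cases below look through the cast to the object and report
   DECL_ALIGN (d) instead, capped at MAX_ALIGN. */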
7818 static int
7819 get_pointer_alignment (exp, max_align)
7820 tree exp;
7821 unsigned max_align;
7823 unsigned align, inner;
7825 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7826 return 0;
7828 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7829 align = MIN (align, max_align);
7831 while (1)
7833 switch (TREE_CODE (exp))
7835 case NOP_EXPR:
7836 case CONVERT_EXPR:
7837 case NON_LVALUE_EXPR:
7838 exp = TREE_OPERAND (exp, 0);
7839 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7840 return align;
7841 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7842 align = MIN (inner, max_align);
7843 break;
7845 case PLUS_EXPR:
7846 /* If sum of pointer + int, restrict our maximum alignment to that
7847 imposed by the integer. If not, we can't do any better than
7848 ALIGN. */
7849 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7850 return align;
7852 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7853 & (max_align - 1))
7854 != 0)
7855 max_align >>= 1;
7857 exp = TREE_OPERAND (exp, 0);
7858 break;
7860 case ADDR_EXPR:
7861 /* See what we are pointing at and look at its alignment. */
7862 exp = TREE_OPERAND (exp, 0);
7863 if (TREE_CODE (exp) == FUNCTION_DECL)
7864 align = FUNCTION_BOUNDARY;
7865 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7866 align = DECL_ALIGN (exp);
7867 #ifdef CONSTANT_ALIGNMENT
7868 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7869 align = CONSTANT_ALIGNMENT (exp, align);
7870 #endif
7871 return MIN (align, max_align);
7873 default:
7874 return align;
7879 /* Return the tree node and offset if a given argument corresponds to
7880 a string constant. */
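/* E.g., for `"hello" + 3' (a PLUS_EXPR whose first operand is the
   ADDR_EXPR of the STRING_CST) this returns the STRING_CST node and
   sets *PTR_OFFSET to 3; for plain `"hello"' the offset is
   integer_zero_node. */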
7882 static tree
7883 string_constant (arg, ptr_offset)
7884 tree arg;
7885 tree *ptr_offset;
7887 STRIP_NOPS (arg);
7889 if (TREE_CODE (arg) == ADDR_EXPR
7890 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7892 *ptr_offset = integer_zero_node;
7893 return TREE_OPERAND (arg, 0);
7895 else if (TREE_CODE (arg) == PLUS_EXPR)
7897 tree arg0 = TREE_OPERAND (arg, 0);
7898 tree arg1 = TREE_OPERAND (arg, 1);
7900 STRIP_NOPS (arg0);
7901 STRIP_NOPS (arg1);
7903 if (TREE_CODE (arg0) == ADDR_EXPR
7904 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7906 *ptr_offset = arg1;
7907 return TREE_OPERAND (arg0, 0);
7909 else if (TREE_CODE (arg1) == ADDR_EXPR
7910 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7912 *ptr_offset = arg0;
7913 return TREE_OPERAND (arg1, 0);
7917 return 0;
7920 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7921 way to do it, because the string could contain a zero byte in the middle;
7922 TREE_STRING_LENGTH is the size of the character array, not the string.
7924 Unfortunately, string_constant can't access the values of const char
7925 arrays with initializers, so neither can we here. */
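/* E.g., c_strlen of `"hello" + 2' yields size_int (3), by running
   strlen at compile time starting at offset 2; for "foo\0bar" with a
   non-constant offset we must return 0, since the interior zero byte
   makes the answer depend on where the search starts. */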
7927 static tree
7928 c_strlen (src)
7929 tree src;
7931 tree offset_node;
7932 int offset, max;
7933 char *ptr;
7935 src = string_constant (src, &offset_node);
7936 if (src == 0)
7937 return 0;
7938 max = TREE_STRING_LENGTH (src);
7939 ptr = TREE_STRING_POINTER (src);
7940 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7942 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7943 compute the offset to the following null if we don't know where to
7944 start searching for it. */
7945 int i;
7946 for (i = 0; i < max; i++)
7947 if (ptr[i] == 0)
7948 return 0;
7949 /* We don't know the starting offset, but we do know that the string
7950 has no internal zero bytes. We can assume that the offset falls
7951 within the bounds of the string; otherwise, the programmer deserves
7952 what he gets. Subtract the offset from the length of the string,
7953 and return that. */
7954 /* This would perhaps not be valid if we were dealing with named
7955 arrays in addition to literal string constants. */
7956 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7959 /* We have a known offset into the string. Start searching there for
7960 a null character. */
7961 if (offset_node == 0)
7962 offset = 0;
7963 else
7965 /* Did we get a long long offset? If so, punt. */
7966 if (TREE_INT_CST_HIGH (offset_node) != 0)
7967 return 0;
7968 offset = TREE_INT_CST_LOW (offset_node);
7970 /* If the offset is known to be out of bounds, warn, and call strlen at
7971 runtime. */
7972 if (offset < 0 || offset > max)
7974 warning ("offset outside bounds of constant string");
7975 return 0;
7977 /* Use strlen to search for the first zero byte. Since any strings
7978 constructed with build_string will have nulls appended, we win even
7979 if we get handed something like (char[4])"abcd".
7981 Since OFFSET is our starting index into the string, no further
7982 calculation is needed. */
7983 return size_int (strlen (ptr + offset));
7987 expand_builtin_return_addr (fndecl_code, count, tem)
7988 enum built_in_function fndecl_code;
7989 int count;
7990 rtx tem;
7992 int i;
7994 /* Some machines need special handling before we can access
7995 arbitrary frames. For example, on the sparc, we must first flush
7996 all register windows to the stack. */
7997 #ifdef SETUP_FRAME_ADDRESSES
7998 SETUP_FRAME_ADDRESSES ();
7999 #endif
8001 /* On the sparc, the return address is not in the frame, it is in a
8002 register. There is no way to access it off of the current frame
8003 pointer, but it can be accessed off the previous frame pointer by
8004 reading the value from the register window save area. */
8005 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8006 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8007 count--;
8008 #endif
8010 /* Scan back COUNT frames to the specified frame. */
8011 for (i = 0; i < count; i++)
8013 /* Assume the dynamic chain pointer is in the word that the
8014 frame address points to, unless otherwise specified. */
8015 #ifdef DYNAMIC_CHAIN_ADDRESS
8016 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8017 #endif
8018 tem = memory_address (Pmode, tem);
8019 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
8022 /* For __builtin_frame_address, return what we've got. */
8023 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8024 return tem;
8026 /* For __builtin_return_address, get the return address from that
8027 frame. */
8028 #ifdef RETURN_ADDR_RTX
8029 tem = RETURN_ADDR_RTX (count, tem);
8030 #else
8031 tem = memory_address (Pmode,
8032 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8033 tem = gen_rtx (MEM, Pmode, tem);
8034 #endif
8035 return tem;
8038 /* Expand an expression EXP that calls a built-in function,
8039 with result going to TARGET if that's convenient
8040 (and in mode MODE if that's convenient).
8041 SUBTARGET may be used as the target for computing one of EXP's operands.
8042 IGNORE is nonzero if the value is to be ignored. */
8044 #define CALLED_AS_BUILT_IN(NODE) \
8045 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
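/* This tests whether the user wrote the `__builtin_' form explicitly.
   Below, when not optimizing, a plain `strlen' call is handed to the
   library, while an explicit `__builtin_strlen' is still expanded
   inline. */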
8047 static rtx
8048 expand_builtin (exp, target, subtarget, mode, ignore)
8049 tree exp;
8050 rtx target;
8051 rtx subtarget;
8052 enum machine_mode mode;
8053 int ignore;
8055 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8056 tree arglist = TREE_OPERAND (exp, 1);
8057 rtx op0;
8058 rtx lab1, insns;
8059 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8060 optab builtin_optab;
8062 switch (DECL_FUNCTION_CODE (fndecl))
8064 case BUILT_IN_ABS:
8065 case BUILT_IN_LABS:
8066 case BUILT_IN_FABS:
8067 /* build_function_call changes these into ABS_EXPR. */
8068 abort ();
8070 case BUILT_IN_SIN:
8071 case BUILT_IN_COS:
8072 /* Treat these like sqrt, but only if the user asks for them. */
8073 if (! flag_fast_math)
8074 break;
8075 case BUILT_IN_FSQRT:
8076 /* If not optimizing, call the library function. */
8077 if (! optimize)
8078 break;
8080 if (arglist == 0
8081 /* Arg could be wrong type if user redeclared this fcn wrong. */
8082 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8083 break;
8085 /* Stabilize and compute the argument. */
8086 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8087 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8089 exp = copy_node (exp);
8090 arglist = copy_node (arglist);
8091 TREE_OPERAND (exp, 1) = arglist;
8092 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8094 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8096 /* Make a suitable register to place result in. */
8097 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8099 emit_queue ();
8100 start_sequence ();
8102 switch (DECL_FUNCTION_CODE (fndecl))
8104 case BUILT_IN_SIN:
8105 builtin_optab = sin_optab; break;
8106 case BUILT_IN_COS:
8107 builtin_optab = cos_optab; break;
8108 case BUILT_IN_FSQRT:
8109 builtin_optab = sqrt_optab; break;
8110 default:
8111 abort ();
8114 /* Compute into TARGET.
8115 Set TARGET to wherever the result comes back. */
8116 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8117 builtin_optab, op0, target, 0);
8119 /* If we were unable to expand via the builtin, stop the
8120 sequence (without outputting the insns) and break, causing
8121 a call to the library function. */
8122 if (target == 0)
8124 end_sequence ();
8125 break;
8128 /* Check the results by default. But if flag_fast_math is turned on,
8129 then assume sqrt will always be called with valid arguments. */
8131 if (! flag_fast_math)
8133 /* Don't define the builtin FP instructions
8134 if your machine is not IEEE. */
8135 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8136 abort ();
8138 lab1 = gen_label_rtx ();
8140 /* Test the result; if it is NaN, set errno=EDOM because
8141 the argument was not in the domain. */
8142 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8143 emit_jump_insn (gen_beq (lab1));
8145 #ifdef TARGET_EDOM
8147 #ifdef GEN_ERRNO_RTX
8148 rtx errno_rtx = GEN_ERRNO_RTX;
8149 #else
8150 rtx errno_rtx
8151 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
8152 #endif
8154 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8156 #else
8157 /* We can't set errno=EDOM directly; let the library call do it.
8158 Pop the arguments right away in case the call gets deleted. */
8159 NO_DEFER_POP;
8160 expand_call (exp, target, 0);
8161 OK_DEFER_POP;
8162 #endif
8164 emit_label (lab1);
8167 /* Output the entire sequence. */
8168 insns = get_insns ();
8169 end_sequence ();
8170 emit_insns (insns);
8172 return target;
8174 /* __builtin_apply_args returns block of memory allocated on
8175 the stack into which is stored the arg pointer, structure
8176 value address, static chain, and all the registers that might
8177 possibly be used in performing a function call. The code is
8178 moved to the start of the function so the incoming values are
8179 saved. */
8180 case BUILT_IN_APPLY_ARGS:
8181 /* Don't do __builtin_apply_args more than once in a function.
8182 Save the result of the first call and reuse it. */
8183 if (apply_args_value != 0)
8184 return apply_args_value;
8186 /* When this function is called, it means that registers must be
8187 saved on entry to this function. So we migrate the
8188 call to the first insn of this function. */
8189 rtx temp;
8190 rtx seq;
8192 start_sequence ();
8193 temp = expand_builtin_apply_args ();
8194 seq = get_insns ();
8195 end_sequence ();
8197 apply_args_value = temp;
8199 /* Put the sequence after the NOTE that starts the function.
8200 If this is inside a SEQUENCE, make the outer-level insn
8201 chain current, so the code is placed at the start of the
8202 function. */
8203 push_topmost_sequence ();
8204 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8205 pop_topmost_sequence ();
8206 return temp;
8209 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8210 FUNCTION with a copy of the parameters described by
8211 ARGUMENTS, and ARGSIZE. It returns a block of memory
8212 allocated on the stack into which is stored all the registers
8213 that might possibly be used for returning the result of a
8214 function. ARGUMENTS is the value returned by
8215 __builtin_apply_args. ARGSIZE is the number of bytes of
8216 arguments that must be copied. ??? How should this value be
8217 computed? We'll also need a safe worst case value for varargs
8218 functions. */
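/* A sketch of the intended use, forwarding the current function's
   arguments to some hypothetical TARGET_FN and returning whatever it
   returns (with 64 standing in for a safe argument-block size):

	void *args = __builtin_apply_args ();
	void *result = __builtin_apply ((void (*)()) target_fn, args, 64);
	__builtin_return (result);
*/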
8219 case BUILT_IN_APPLY:
8220 if (arglist == 0
8221 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8222 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8223 || TREE_CHAIN (arglist) == 0
8224 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8225 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8226 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8227 return const0_rtx;
8228 else
8230 int i;
8231 tree t;
8232 rtx ops[3];
8234 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8235 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8237 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8240 /* __builtin_return (RESULT) causes the function to return the
8241 value described by RESULT. RESULT is address of the block of
8242 memory returned by __builtin_apply. */
8243 case BUILT_IN_RETURN:
8244 if (arglist
8245 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8246 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8247 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8248 NULL_RTX, VOIDmode, 0));
8249 return const0_rtx;
8251 case BUILT_IN_SAVEREGS:
8252 /* Don't do __builtin_saveregs more than once in a function.
8253 Save the result of the first call and reuse it. */
8254 if (saveregs_value != 0)
8255 return saveregs_value;
8257 /* When this function is called, it means that registers must be
8258 saved on entry to this function. So we migrate the
8259 call to the first insn of this function. */
8260 rtx temp;
8261 rtx seq;
8263 /* Now really call the function. `expand_call' does not call
8264 expand_builtin, so there is no danger of infinite recursion here. */
8265 start_sequence ();
8267 #ifdef EXPAND_BUILTIN_SAVEREGS
8268 /* Do whatever the machine needs done in this case. */
8269 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8270 #else
8271 /* The register where the function returns its value
8272 is likely to have something else in it, such as an argument.
8273 So preserve that register around the call. */
8275 if (value_mode != VOIDmode)
8277 rtx valreg = hard_libcall_value (value_mode);
8278 rtx saved_valreg = gen_reg_rtx (value_mode);
8280 emit_move_insn (saved_valreg, valreg);
8281 temp = expand_call (exp, target, ignore);
8282 emit_move_insn (valreg, saved_valreg);
8284 else
8285 /* Generate the call, putting the value in a pseudo. */
8286 temp = expand_call (exp, target, ignore);
8287 #endif
8289 seq = get_insns ();
8290 end_sequence ();
8292 saveregs_value = temp;
8294 /* Put the sequence after the NOTE that starts the function.
8295 If this is inside a SEQUENCE, make the outer-level insn
8296 chain current, so the code is placed at the start of the
8297 function. */
8298 push_topmost_sequence ();
8299 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8300 pop_topmost_sequence ();
8301 return temp;
8304 /* __builtin_args_info (N) returns word N of the arg space info
8305 for the current function. The number and meanings of the words
8306 are controlled by the definition of CUMULATIVE_ARGS. */
8307 case BUILT_IN_ARGS_INFO:
8309 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8310 int i;
8311 int *word_ptr = (int *) &current_function_args_info;
8312 tree type, elts, result;
8314 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8315 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8316 __FILE__, __LINE__);
8318 if (arglist != 0)
8320 tree arg = TREE_VALUE (arglist);
8321 if (TREE_CODE (arg) != INTEGER_CST)
8322 error ("argument of `__builtin_args_info' must be constant");
8323 else
8325 int wordnum = TREE_INT_CST_LOW (arg);
8327 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8328 error ("argument of `__builtin_args_info' out of range");
8329 else
8330 return GEN_INT (word_ptr[wordnum]);
8333 else
8334 error ("missing argument in `__builtin_args_info'");
8336 return const0_rtx;
8338 #if 0
8339 for (i = 0; i < nwords; i++)
8340 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8342 type = build_array_type (integer_type_node,
8343 build_index_type (build_int_2 (nwords, 0)));
8344 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8345 TREE_CONSTANT (result) = 1;
8346 TREE_STATIC (result) = 1;
8347 result = build (INDIRECT_REF, build_pointer_type (type), result);
8348 TREE_CONSTANT (result) = 1;
8349 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8350 #endif
8353 /* Return the address of the first anonymous stack arg. */
8354 case BUILT_IN_NEXT_ARG:
8356 tree fntype = TREE_TYPE (current_function_decl);
8358 if ((TYPE_ARG_TYPES (fntype) == 0
8359 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8360 == void_type_node))
8361 && ! current_function_varargs)
8363 error ("`va_start' used in function with fixed args");
8364 return const0_rtx;
8367 if (arglist)
8369 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8370 tree arg = TREE_VALUE (arglist);
8372 /* Strip off all nops for the sake of the comparison. This
8373 is not quite the same as STRIP_NOPS. It does more.
8374 We must also strip off INDIRECT_EXPR for C++ reference
8375 parameters. */
8376 while (TREE_CODE (arg) == NOP_EXPR
8377 || TREE_CODE (arg) == CONVERT_EXPR
8378 || TREE_CODE (arg) == NON_LVALUE_EXPR
8379 || TREE_CODE (arg) == INDIRECT_REF)
8380 arg = TREE_OPERAND (arg, 0);
8381 if (arg != last_parm)
8382 warning ("second parameter of `va_start' not last named argument");
8384 else if (! current_function_varargs)
8385 /* Evidently an out-of-date version of <stdarg.h>; can't validate
8386 va_start's second argument, but can still work as intended. */
8387 warning ("`__builtin_next_arg' called without an argument");
8390 return expand_binop (Pmode, add_optab,
8391 current_function_internal_arg_pointer,
8392 current_function_arg_offset_rtx,
8393 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8395 case BUILT_IN_CLASSIFY_TYPE:
8396 if (arglist != 0)
8398 tree type = TREE_TYPE (TREE_VALUE (arglist));
8399 enum tree_code code = TREE_CODE (type);
8400 if (code == VOID_TYPE)
8401 return GEN_INT (void_type_class);
8402 if (code == INTEGER_TYPE)
8403 return GEN_INT (integer_type_class);
8404 if (code == CHAR_TYPE)
8405 return GEN_INT (char_type_class);
8406 if (code == ENUMERAL_TYPE)
8407 return GEN_INT (enumeral_type_class);
8408 if (code == BOOLEAN_TYPE)
8409 return GEN_INT (boolean_type_class);
8410 if (code == POINTER_TYPE)
8411 return GEN_INT (pointer_type_class);
8412 if (code == REFERENCE_TYPE)
8413 return GEN_INT (reference_type_class);
8414 if (code == OFFSET_TYPE)
8415 return GEN_INT (offset_type_class);
8416 if (code == REAL_TYPE)
8417 return GEN_INT (real_type_class);
8418 if (code == COMPLEX_TYPE)
8419 return GEN_INT (complex_type_class);
8420 if (code == FUNCTION_TYPE)
8421 return GEN_INT (function_type_class);
8422 if (code == METHOD_TYPE)
8423 return GEN_INT (method_type_class);
8424 if (code == RECORD_TYPE)
8425 return GEN_INT (record_type_class);
8426 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8427 return GEN_INT (union_type_class);
8428 if (code == ARRAY_TYPE)
8430 if (TYPE_STRING_FLAG (type))
8431 return GEN_INT (string_type_class);
8432 else
8433 return GEN_INT (array_type_class);
8435 if (code == SET_TYPE)
8436 return GEN_INT (set_type_class);
8437 if (code == FILE_TYPE)
8438 return GEN_INT (file_type_class);
8439 if (code == LANG_TYPE)
8440 return GEN_INT (lang_type_class);
8442 return GEN_INT (no_type_class);
8444 case BUILT_IN_CONSTANT_P:
8445 if (arglist == 0)
8446 return const0_rtx;
8447 else
8449 tree arg = TREE_VALUE (arglist);
8451 STRIP_NOPS (arg);
8452 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8453 || (TREE_CODE (arg) == ADDR_EXPR
8454 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8455 ? const1_rtx : const0_rtx);
8458 case BUILT_IN_FRAME_ADDRESS:
8459 /* The argument must be a nonnegative integer constant.
8460 It counts the number of frames to scan up the stack.
8461 The value is the address of that frame. */
8462 case BUILT_IN_RETURN_ADDRESS:
8463 /* The argument must be a nonnegative integer constant.
8464 It counts the number of frames to scan up the stack.
8465 The value is the return address saved in that frame. */
8466 if (arglist == 0)
8467 /* Warning about missing arg was already issued. */
8468 return const0_rtx;
8469 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8471 error ("invalid arg to `__builtin_return_address'");
8472 return const0_rtx;
8474 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8476 error ("invalid arg to `__builtin_return_address'");
8477 return const0_rtx;
8479 else
8481 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8482 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8483 hard_frame_pointer_rtx);
8485 /* For __builtin_frame_address, return what we've got. */
8486 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8487 return tem;
8489 if (GET_CODE (tem) != REG)
8490 tem = copy_to_reg (tem);
8491 return tem;
8494 case BUILT_IN_ALLOCA:
8495 if (arglist == 0
8496 /* Arg could be non-integer if user redeclared this fcn wrong. */
8497 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8498 break;
8500 /* Compute the argument. */
8501 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8503 /* Allocate the desired space. */
8504 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8506 case BUILT_IN_FFS:
8507 /* If not optimizing, call the library function. */
8508 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8509 break;
8511 if (arglist == 0
8512 /* Arg could be non-integer if user redeclared this fcn wrong. */
8513 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8514 break;
8516 /* Compute the argument. */
8517 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8518 /* Compute ffs, into TARGET if possible.
8519 Set TARGET to wherever the result comes back. */
8520 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8521 ffs_optab, op0, target, 1);
8522 if (target == 0)
8523 abort ();
8524 return target;
8526 case BUILT_IN_STRLEN:
8527 /* If not optimizing, call the library function. */
8528 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8529 break;
8531 if (arglist == 0
8532 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8533 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8534 break;
8535 else
8537 tree src = TREE_VALUE (arglist);
8538 tree len = c_strlen (src);
8540 int align
8541 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8543 rtx result, src_rtx, char_rtx;
8544 enum machine_mode insn_mode = value_mode, char_mode;
8545 enum insn_code icode;
8547 /* If the length is known, just return it. */
8548 if (len != 0)
8549 return expand_expr (len, target, mode, 0);
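/* E.g., for strlen ("hello") the length was folded to the constant 5
   by c_strlen above, and we have just returned it. */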
8551 /* If SRC is not a pointer type, don't do this operation inline. */
8552 if (align == 0)
8553 break;
8555 /* Call a function if we can't compute strlen in the right mode. */
8557 while (insn_mode != VOIDmode)
8559 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8560 if (icode != CODE_FOR_nothing)
8561 break;
8563 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8565 if (insn_mode == VOIDmode)
8566 break;
8568 /* Make a place to write the result of the instruction. */
8569 result = target;
8570 if (! (result != 0
8571 && GET_CODE (result) == REG
8572 && GET_MODE (result) == insn_mode
8573 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8574 result = gen_reg_rtx (insn_mode);
8576 /* Make sure the operands are acceptable to the predicates. */
8578 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8579 result = gen_reg_rtx (insn_mode);
8581 src_rtx = memory_address (BLKmode,
8582 expand_expr (src, NULL_RTX, ptr_mode,
8583 EXPAND_NORMAL));
8584 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8585 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8587 char_rtx = const0_rtx;
8588 char_mode = insn_operand_mode[(int)icode][2];
8589 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8590 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8592 emit_insn (GEN_FCN (icode) (result,
8593 gen_rtx (MEM, BLKmode, src_rtx),
8594 char_rtx, GEN_INT (align)));
8596 /* Return the value in the proper mode for this function. */
8597 if (GET_MODE (result) == value_mode)
8598 return result;
8599 else if (target != 0)
8601 convert_move (target, result, 0);
8602 return target;
8604 else
8605 return convert_to_mode (value_mode, result, 0);
8608 case BUILT_IN_STRCPY:
8609 /* If not optimizing, call the library function. */
8610 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8611 break;
8613 if (arglist == 0
8614 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8615 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8616 || TREE_CHAIN (arglist) == 0
8617 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8618 break;
8619 else
8621 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8623 if (len == 0)
8624 break;
8626 len = size_binop (PLUS_EXPR, len, integer_one_node);
8628 chainon (arglist, build_tree_list (NULL_TREE, len));
8631 /* Falls through. */
8632 case BUILT_IN_MEMCPY:
8633 /* If not optimizing, call the library function. */
8634 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8635 break;
8637 if (arglist == 0
8638 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8639 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8640 || TREE_CHAIN (arglist) == 0
8641 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8642 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8643 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8644 break;
8645 else
8647 tree dest = TREE_VALUE (arglist);
8648 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8649 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8650 tree type;
8652 int src_align
8653 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8654 int dest_align
8655 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8656 rtx dest_rtx, dest_mem, src_mem;
8658 /* If either SRC or DEST is not a pointer type, don't do
8659 this operation in-line. */
8660 if (src_align == 0 || dest_align == 0)
8662 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8663 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8664 break;
8667 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8668 dest_mem = gen_rtx (MEM, BLKmode,
8669 memory_address (BLKmode, dest_rtx));
8670 /* There could be a void* cast on top of the object. */
8671 while (TREE_CODE (dest) == NOP_EXPR)
8672 dest = TREE_OPERAND (dest, 0);
8673 type = TREE_TYPE (TREE_TYPE (dest));
8674 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8675 src_mem = gen_rtx (MEM, BLKmode,
8676 memory_address (BLKmode,
8677 expand_expr (src, NULL_RTX,
8678 ptr_mode,
8679 EXPAND_SUM)));
8680 /* There could be a void* cast on top of the object. */
8681 while (TREE_CODE (src) == NOP_EXPR)
8682 src = TREE_OPERAND (src, 0);
8683 type = TREE_TYPE (TREE_TYPE (src));
8684 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8686 /* Copy word part most expediently. */
8687 emit_block_move (dest_mem, src_mem,
8688 expand_expr (len, NULL_RTX, VOIDmode, 0),
8689 MIN (src_align, dest_align));
8690 return force_operand (dest_rtx, NULL_RTX);
8693 case BUILT_IN_MEMSET:
8694 /* If not optimizing, call the library function. */
8695 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8696 break;
8698 if (arglist == 0
8699 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8700 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8701 || TREE_CHAIN (arglist) == 0
8702 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8703 != INTEGER_TYPE)
8704 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8705 || (INTEGER_TYPE
8706 != (TREE_CODE (TREE_TYPE
8707 (TREE_VALUE
8708 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8709 break;
8710 else
8712 tree dest = TREE_VALUE (arglist);
8713 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8714 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8715 tree type;
8717 int dest_align
8718 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8719 rtx dest_rtx, dest_mem;
8721 /* If DEST is not a pointer type, don't do this
8722 operation in-line. */
8723 if (dest_align == 0)
8724 break;
8726 /* If VAL is not 0, don't do this operation in-line. */
8727 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8728 break;
8730 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8731 dest_mem = gen_rtx (MEM, BLKmode,
8732 memory_address (BLKmode, dest_rtx));
8733 /* There could be a void* cast on top of the object. */
8734 while (TREE_CODE (dest) == NOP_EXPR)
8735 dest = TREE_OPERAND (dest, 0);
8736 type = TREE_TYPE (TREE_TYPE (dest));
8737 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8739 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8740 dest_align);
8742 return force_operand (dest_rtx, NULL_RTX);
8745 /* These comparison functions need an instruction that returns an actual
8746 index. An ordinary compare that just sets the condition codes
8747 is not enough. */
8748 #ifdef HAVE_cmpstrsi
8749 case BUILT_IN_STRCMP:
8750 /* If not optimizing, call the library function. */
8751 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8752 break;
8754 if (arglist == 0
8755 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8756 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8757 || TREE_CHAIN (arglist) == 0
8758 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8759 break;
8760 else if (!HAVE_cmpstrsi)
8761 break;
8763 tree arg1 = TREE_VALUE (arglist);
8764 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8765 tree offset;
8766 tree len, len2;
8768 len = c_strlen (arg1);
8769 if (len)
8770 len = size_binop (PLUS_EXPR, integer_one_node, len);
8771 len2 = c_strlen (arg2);
8772 if (len2)
8773 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8775 /* If we don't have a constant length for the first, use the length
8776 of the second, if we know it. We don't require a constant for
8777 this case; some cost analysis could be done if both are available
8778 but neither is constant. For now, assume they're equally cheap.
8780 If both strings have constant lengths, use the smaller. This
8781 could arise if optimization results in strcpy being called with
8782 two fixed strings, or if the code was machine-generated. We should
8783 add some code to the `memcmp' handler below to deal with such
8784 situations, someday. */
8785 if (!len || TREE_CODE (len) != INTEGER_CST)
8787 if (len2)
8788 len = len2;
8789 else if (len == 0)
8790 break;
8792 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8794 if (tree_int_cst_lt (len2, len))
8795 len = len2;
8798 chainon (arglist, build_tree_list (NULL_TREE, len));
8801 /* Falls through. */
8802 case BUILT_IN_MEMCMP:
8803 /* If not optimizing, call the library function. */
8804 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8805 break;
8807 if (arglist == 0
8808 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8809 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8810 || TREE_CHAIN (arglist) == 0
8811 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8812 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8813 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8814 break;
8815 else if (!HAVE_cmpstrsi)
8816 break;
8818 tree arg1 = TREE_VALUE (arglist);
8819 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8820 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8821 rtx result;
8823 int arg1_align
8824 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8825 int arg2_align
8826 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8827 enum machine_mode insn_mode
8828 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8830 /* If we don't have POINTER_TYPE, call the function. */
8831 if (arg1_align == 0 || arg2_align == 0)
8833 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8834 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8835 break;
8838 /* Make a place to write the result of the instruction. */
8839 result = target;
8840 if (! (result != 0
8841 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8842 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8843 result = gen_reg_rtx (insn_mode);
8845 emit_insn (gen_cmpstrsi (result,
8846 gen_rtx (MEM, BLKmode,
8847 expand_expr (arg1, NULL_RTX,
8848 ptr_mode,
8849 EXPAND_NORMAL)),
8850 gen_rtx (MEM, BLKmode,
8851 expand_expr (arg2, NULL_RTX,
8852 ptr_mode,
8853 EXPAND_NORMAL)),
8854 expand_expr (len, NULL_RTX, VOIDmode, 0),
8855 GEN_INT (MIN (arg1_align, arg2_align))));
8857 /* Return the value in the proper mode for this function. */
8858 mode = TYPE_MODE (TREE_TYPE (exp));
8859 if (GET_MODE (result) == mode)
8860 return result;
8861 else if (target != 0)
8863 convert_move (target, result, 0);
8864 return target;
8866 else
8867 return convert_to_mode (mode, result, 0);
8869 #else
8870 case BUILT_IN_STRCMP:
8871 case BUILT_IN_MEMCMP:
8872 break;
8873 #endif
8875 /* __builtin_setjmp is passed a pointer to an array of five words
8876 (not all will be used on all machines). It operates similarly to
8877 the C library function of the same name, but is more efficient.
8878 Much of the code below (and for longjmp) is copied from the handling
8879 of non-local gotos.
8881 NOTE: This is intended for use by GNAT and will only work in
8882 the way GNAT uses it. This code will likely NOT survive to
8883 the GCC 2.8.0 release. */
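/* As a sketch, the code below lays the five-word buffer out as

	word 0	frame pointer (virtual_stack_vars_rtx)
	word 1	address of the receiver label (lab1)
	word 2..	machine-dependent stack save area

   which is also the layout __builtin_longjmp below expects to find. */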
8884 case BUILT_IN_SETJMP:
8885 if (arglist == 0
8886 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8887 break;
8890 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8891 VOIDmode, 0);
8892 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8893 enum machine_mode sa_mode = Pmode;
8894 rtx stack_save;
8895 int old_inhibit_defer_pop = inhibit_defer_pop;
8896 int return_pops
8897 = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8898 build_function_type (void_type_node, NULL_TREE),
8900 rtx next_arg_reg;
8901 CUMULATIVE_ARGS args_so_far;
8902 int i;
8904 #ifdef POINTERS_EXTEND_UNSIGNED
8905 buf_addr = convert_memory_address (Pmode, buf_addr);
8906 #endif
8908 buf_addr = force_reg (Pmode, buf_addr);
8910 if (target == 0 || GET_CODE (target) != REG
8911 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8912 target = gen_reg_rtx (value_mode);
8914 emit_queue ();
8916 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8917 current_function_calls_setjmp = 1;
8919 /* We store the frame pointer and the address of lab1 in the buffer
8920 and use the rest of it for the stack save area, which is
8921 machine-dependent. */
8922 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8923 virtual_stack_vars_rtx);
8924 emit_move_insn
8925 (validize_mem (gen_rtx (MEM, Pmode,
8926 plus_constant (buf_addr,
8927 GET_MODE_SIZE (Pmode)))),
8928 gen_rtx (LABEL_REF, Pmode, lab1));
8930 #ifdef HAVE_save_stack_nonlocal
8931 if (HAVE_save_stack_nonlocal)
8932 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8933 #endif
8935 stack_save = gen_rtx (MEM, sa_mode,
8936 plus_constant (buf_addr,
8937 2 * GET_MODE_SIZE (Pmode)));
8938 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8940 #ifdef HAVE_setjmp
8941 if (HAVE_setjmp)
8942 emit_insn (gen_setjmp ());
8943 #endif
8945 /* Set TARGET to zero and branch around the other case. */
8946 emit_move_insn (target, const0_rtx);
8947 emit_jump_insn (gen_jump (lab2));
8948 emit_barrier ();
8949 emit_label (lab1);
8951 /* Note that setjmp clobbers FP when we get here, so we have to
8952 make sure it's marked as used by this function. */
8953 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8955 /* Mark the static chain as clobbered here so life information
8956 doesn't get messed up for it. */
8957 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8959 /* Now put in the code to restore the frame pointer, and argument
8960 pointer, if needed. The code below is from expand_end_bindings
8961 in stmt.c; see detailed documentation there. */
8962 #ifdef HAVE_nonlocal_goto
8963 if (! HAVE_nonlocal_goto)
8964 #endif
8965 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8967 current_function_has_nonlocal_goto = 1;
8969 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8970 if (fixed_regs[ARG_POINTER_REGNUM])
8972 #ifdef ELIMINABLE_REGS
8973 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8975 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8976 if (elim_regs[i].from == ARG_POINTER_REGNUM
8977 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8978 break;
8980 if (i == sizeof elim_regs / sizeof elim_regs [0])
8981 #endif
8983 /* Now restore our arg pointer from the address at which it
8984 was saved in our stack frame.
8985 If there hasn't been space allocated for it yet, make
8986 some now. */
8987 if (arg_pointer_save_area == 0)
8988 arg_pointer_save_area
8989 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8990 emit_move_insn (virtual_incoming_args_rtx,
8991 copy_to_reg (arg_pointer_save_area));
8994 #endif
8996 #ifdef HAVE_nonlocal_goto_receiver
8997 if (HAVE_nonlocal_goto_receiver)
8998 emit_insn (gen_nonlocal_goto_receiver ());
8999 #endif
9000 /* The static chain pointer contains the address of the dummy function.
9001 We need to call it here to handle some PIC cases of restoring
9002 a global pointer. Then return 1. */
9003 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
9005 /* We can't actually call emit_library_call here, so do everything
9006 it does, which isn't much for a libfunc with no args. */
9007 op0 = memory_address (FUNCTION_MODE, op0);
9009 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
9010 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
9011 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
9013 #ifndef ACCUMULATE_OUTGOING_ARGS
9014 #ifdef HAVE_call_pop
9015 if (HAVE_call_pop)
9016 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
9017 const0_rtx, next_arg_reg,
9018 GEN_INT (return_pops)));
9019 else
9020 #endif
9021 #endif
9023 #ifdef HAVE_call
9024 if (HAVE_call)
9025 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
9026 const0_rtx, next_arg_reg, const0_rtx));
9027 else
9028 #endif
9029 abort ();
9031 emit_move_insn (target, const1_rtx);
9032 emit_label (lab2);
9033 return target;
9036 /* __builtin_longjmp is passed a pointer to an array of five words
9037 and a value, which is a dummy. It's similar to the C library longjmp
9038 function but works with __builtin_setjmp above. */
9039 case BUILT_IN_LONGJMP:
9040 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9041 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9042 break;
9045 tree dummy_id = get_identifier ("__dummy");
9046 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
9047 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
9048 #ifdef POINTERS_EXTEND_UNSIGNED
9049 rtx buf_addr
9050 = force_reg (Pmode,
9051 convert_memory_address
9052 (Pmode,
9053 expand_expr (TREE_VALUE (arglist),
9054 NULL_RTX, VOIDmode, 0)));
9055 #else
9056 rtx buf_addr
9057 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
9058 NULL_RTX,
9059 VOIDmode, 0));
9060 #endif
9061 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
9062 rtx lab = gen_rtx (MEM, Pmode,
9063 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
9064 enum machine_mode sa_mode
9065 #ifdef HAVE_save_stack_nonlocal
9066 = (HAVE_save_stack_nonlocal
9067 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
9068 : Pmode);
9069 #else
9070 = Pmode;
9071 #endif
9072 rtx stack = gen_rtx (MEM, sa_mode,
9073 plus_constant (buf_addr,
9074 2 * GET_MODE_SIZE (Pmode)));
9076 DECL_EXTERNAL (dummy_decl) = 1;
9077 TREE_PUBLIC (dummy_decl) = 1;
9078 make_decl_rtl (dummy_decl, NULL_PTR, 1);
9080 /* Expand the second expression just for side-effects. */
9081 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
9082 const0_rtx, VOIDmode, 0);
9084 assemble_external (dummy_decl);
9086 /* Pick up FP, label, and SP from the block and jump. This code is
9087 from expand_goto in stmt.c; see there for detailed comments. */
9088 #if HAVE_nonlocal_goto
9089 if (HAVE_nonlocal_goto)
9090 emit_insn (gen_nonlocal_goto (fp, lab, stack,
9091 XEXP (DECL_RTL (dummy_decl), 0)));
9092 else
9093 #endif
9095 lab = copy_to_reg (lab);
9096 emit_move_insn (hard_frame_pointer_rtx, fp);
9097 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
9099 /* Put in the static chain register the address of the dummy
9100 function. */
9101 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
9102 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
9103 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
9104 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
9105 emit_indirect_jump (lab);
9108 return const0_rtx;
9111 default: /* just do library call, if unknown builtin */
9112 error ("built-in function `%s' not currently supported",
9113 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9116 /* The switch statement above can drop through to cause the function
9117 to be called normally. */
9119 return expand_call (exp, target, ignore);
9122 /* Built-in functions to perform an untyped call and return. */
9124 /* For each register that may be used for calling a function, this
9125 gives a mode used to copy the register's value. VOIDmode indicates
9126 the register is not used for calling a function. If the machine
9127 has register windows, this gives only the outbound registers.
9128 INCOMING_REGNO gives the corresponding inbound register. */
9129 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
9131 /* For each register that may be used for returning values, this gives
9132 a mode used to copy the register's value. VOIDmode indicates the
9133 register is not used for returning values. If the machine has
9134 register windows, this gives only the outbound registers.
9135 INCOMING_REGNO gives the corresponding inbound register. */
9136 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
9138 /* For each register that may be used for calling a function, this
9139 gives the offset of that register into the block returned by
9140 __builtin_apply_args. 0 indicates that the register is not
9141 used for calling a function. */
9142 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9144 /* Return the offset of register REGNO into the block returned by
9145 __builtin_apply_args. This is not declared static, since it is
9146 needed in objc-act.c. */
9148 int
9149 apply_args_register_offset (regno)
9150 int regno;
9152 apply_args_size ();
9154 /* Arguments are always put in outgoing registers (in the argument
9155 block) if that makes sense. */
9156 #ifdef OUTGOING_REGNO
9157 regno = OUTGOING_REGNO (regno);
9158 #endif
9159 return apply_args_reg_offset[regno];
9162 /* Return the size required for the block returned by __builtin_apply_args,
9163 and initialize apply_args_mode. */
9165 static int
9166 apply_args_size ()
9168 static int size = -1;
9169 int align, regno;
9170 enum machine_mode mode;
9172 /* The values computed by this function never change. */
9173 if (size < 0)
9175 /* The first value is the incoming arg-pointer. */
9176 size = GET_MODE_SIZE (Pmode);
9178 /* The second value is the structure value address unless this is
9179 passed as an "invisible" first argument. */
9180 if (struct_value_rtx)
9181 size += GET_MODE_SIZE (Pmode);
9183 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9184 if (FUNCTION_ARG_REGNO_P (regno))
9186 /* Search for the proper mode for copying this register's
9187 value. I'm not sure this is right, but it works so far. */
9188 enum machine_mode best_mode = VOIDmode;
9190 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9191 mode != VOIDmode;
9192 mode = GET_MODE_WIDER_MODE (mode))
9193 if (HARD_REGNO_MODE_OK (regno, mode)
9194 && HARD_REGNO_NREGS (regno, mode) == 1)
9195 best_mode = mode;
9197 if (best_mode == VOIDmode)
9198 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9199 mode != VOIDmode;
9200 mode = GET_MODE_WIDER_MODE (mode))
9201 if (HARD_REGNO_MODE_OK (regno, mode)
9202 && (mov_optab->handlers[(int) mode].insn_code
9203 != CODE_FOR_nothing))
9204 best_mode = mode;
9206 mode = best_mode;
9207 if (mode == VOIDmode)
9208 abort ();
9210 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9211 if (size % align != 0)
9212 size = CEIL (size, align) * align;
9213 apply_args_reg_offset[regno] = size;
9214 size += GET_MODE_SIZE (mode);
9215 apply_args_mode[regno] = mode;
9217 else
9219 apply_args_mode[regno] = VOIDmode;
9220 apply_args_reg_offset[regno] = 0;
9223 return size;
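/* Worked example of the layout computed above, for a hypothetical
   32-bit target with no struct_value_rtx and two SImode argument
   registers r0 and r1:

     size starts at 4                     (incoming arg-pointer)
     apply_args_reg_offset[r0] = 4,  size becomes 8
     apply_args_reg_offset[r1] = 8,  size becomes 12

   The CEIL rounding only matters when a register's mode is more
   strictly aligned than the running SIZE; e.g. a DFmode register
   with 8-byte alignment arriving at SIZE == 4 is placed at 8.  */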
9226 /* Return the size required for the block returned by __builtin_apply,
9227 and initialize apply_result_mode. */
9229 static int
9230 apply_result_size ()
9232 static int size = -1;
9233 int align, regno;
9234 enum machine_mode mode;
9236 /* The values computed by this function never change. */
9237 if (size < 0)
9239 size = 0;
9241 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9242 if (FUNCTION_VALUE_REGNO_P (regno))
9244 /* Search for the proper mode for copying this register's
9245 value. I'm not sure this is right, but it works so far. */
9246 enum machine_mode best_mode = VOIDmode;
9248 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9249 mode != TImode;
9250 mode = GET_MODE_WIDER_MODE (mode))
9251 if (HARD_REGNO_MODE_OK (regno, mode))
9252 best_mode = mode;
9254 if (best_mode == VOIDmode)
9255 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9256 mode != VOIDmode;
9257 mode = GET_MODE_WIDER_MODE (mode))
9258 if (HARD_REGNO_MODE_OK (regno, mode)
9259 && (mov_optab->handlers[(int) mode].insn_code
9260 != CODE_FOR_nothing))
9261 best_mode = mode;
9263 mode = best_mode;
9264 if (mode == VOIDmode)
9265 abort ();
9267 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9268 if (size % align != 0)
9269 size = CEIL (size, align) * align;
9270 size += GET_MODE_SIZE (mode);
9271 apply_result_mode[regno] = mode;
9273 else
9274 apply_result_mode[regno] = VOIDmode;
9276 /* Allow targets that use untyped_call and untyped_return to override
9277 the size so that machine-specific information can be stored here. */
9278 #ifdef APPLY_RESULT_SIZE
9279 size = APPLY_RESULT_SIZE;
9280 #endif
9282 return size;
9285 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9286 /* Create a vector describing the result block RESULT. If SAVEP is true,
9287 the result block is used to save the values; otherwise it is used to
9288 restore the values. */
9290 static rtx
9291 result_vector (savep, result)
9292 int savep;
9293 rtx result;
9295 int regno, size, align, nelts;
9296 enum machine_mode mode;
9297 rtx reg, mem;
9298 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9300 size = nelts = 0;
9301 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9302 if ((mode = apply_result_mode[regno]) != VOIDmode)
9304 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9305 if (size % align != 0)
9306 size = CEIL (size, align) * align;
9307 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9308 mem = change_address (result, mode,
9309 plus_constant (XEXP (result, 0), size));
9310 savevec[nelts++] = (savep
9311 ? gen_rtx (SET, VOIDmode, mem, reg)
9312 : gen_rtx (SET, VOIDmode, reg, mem));
9313 size += GET_MODE_SIZE (mode);
9315 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
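/* For instance, on a hypothetical target whose value-return registers
   are r0 (SImode) and f0 (DFmode), result_vector (1, result) builds
   schematically

     (parallel [(set (mem:SI result+0) (reg:SI r0))
                (set (mem:DF result+8) (reg:DF f0))])

   one SET per live return register, storing into the block; with
   SAVEP == 0 each SET runs the other way, loading from the block.  */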
9317 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9319 /* Save the state required to perform an untyped call with the same
9320 arguments as were passed to the current function. */
9322 static rtx
9323 expand_builtin_apply_args ()
9325 rtx registers;
9326 int size, align, regno;
9327 enum machine_mode mode;
9329 /* Create a block where the arg-pointer, structure value address,
9330 and argument registers can be saved. */
9331 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9333 /* Walk past the arg-pointer and structure value address. */
9334 size = GET_MODE_SIZE (Pmode);
9335 if (struct_value_rtx)
9336 size += GET_MODE_SIZE (Pmode);
9338 /* Save each register used in calling a function to the block. */
9339 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9340 if ((mode = apply_args_mode[regno]) != VOIDmode)
9342 rtx tem;
9344 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9345 if (size % align != 0)
9346 size = CEIL (size, align) * align;
9348 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9350 #ifdef STACK_REGS
9351 /* For reg-stack.c's stack register bookkeeping.
9352 Compare with a similar piece of code in function.c. */
9354 emit_insn (gen_rtx (USE, mode, tem));
9355 #endif
9357 emit_move_insn (change_address (registers, mode,
9358 plus_constant (XEXP (registers, 0),
9359 size)),
9360 tem);
9361 size += GET_MODE_SIZE (mode);
9364 /* Save the arg pointer to the block. */
9365 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9366 copy_to_reg (virtual_incoming_args_rtx));
9367 size = GET_MODE_SIZE (Pmode);
9369 /* Save the structure value address unless this is passed as an
9370 "invisible" first argument. */
9371 if (struct_value_incoming_rtx)
9373 emit_move_insn (change_address (registers, Pmode,
9374 plus_constant (XEXP (registers, 0),
9375 size)),
9376 copy_to_reg (struct_value_incoming_rtx));
9377 size += GET_MODE_SIZE (Pmode);
9380 /* Return the address of the block. */
9381 return copy_addr_to_reg (XEXP (registers, 0));
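/* So the block laid out above has the shape

     [arg pointer] [struct value address, if any] [argument registers...]

   and the caller of __builtin_apply_args receives a pointer to it.  */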
9384 /* Perform an untyped call and save the state required to perform an
9385 untyped return of whatever value was returned by the given function. */
9387 static rtx
9388 expand_builtin_apply (function, arguments, argsize)
9389 rtx function, arguments, argsize;
9391 int size, align, regno;
9392 enum machine_mode mode;
9393 rtx incoming_args, result, reg, dest, call_insn;
9394 rtx old_stack_level = 0;
9395 rtx call_fusage = 0;
9397 /* Create a block where the return registers can be saved. */
9398 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9400 /* ??? The argsize value should be adjusted here. */
9402 /* Fetch the arg pointer from the ARGUMENTS block. */
9403 incoming_args = gen_reg_rtx (Pmode);
9404 emit_move_insn (incoming_args,
9405 gen_rtx (MEM, Pmode, arguments));
9406 #ifndef STACK_GROWS_DOWNWARD
9407 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9408 incoming_args, 0, OPTAB_LIB_WIDEN);
9409 #endif
9411 /* Perform postincrements before actually calling the function. */
9412 emit_queue ();
9414 /* Push a new argument block and copy the arguments. */
9415 do_pending_stack_adjust ();
9416 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9418 /* Push a block of memory onto the stack to store the memory arguments.
9419 Save the address in a register, and copy the memory arguments. ??? I
9420 haven't figured out how the calling convention macros affect this,
9421 but it's likely that the source and/or destination addresses in
9422 the block copy will need updating in machine specific ways. */
9423 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9424 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9425 gen_rtx (MEM, BLKmode, incoming_args),
9426 argsize,
9427 PARM_BOUNDARY / BITS_PER_UNIT);
9429 /* Refer to the argument block. */
9430 apply_args_size ();
9431 arguments = gen_rtx (MEM, BLKmode, arguments);
9433 /* Walk past the arg-pointer and structure value address. */
9434 size = GET_MODE_SIZE (Pmode);
9435 if (struct_value_rtx)
9436 size += GET_MODE_SIZE (Pmode);
9438 /* Restore each of the registers previously saved. Make USE insns
9439 for each of these registers for use in making the call. */
9440 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9441 if ((mode = apply_args_mode[regno]) != VOIDmode)
9443 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9444 if (size % align != 0)
9445 size = CEIL (size, align) * align;
9446 reg = gen_rtx (REG, mode, regno);
9447 emit_move_insn (reg,
9448 change_address (arguments, mode,
9449 plus_constant (XEXP (arguments, 0),
9450 size)));
9452 use_reg (&call_fusage, reg);
9453 size += GET_MODE_SIZE (mode);
9456 /* Restore the structure value address unless this is passed as an
9457 "invisible" first argument. */
9458 size = GET_MODE_SIZE (Pmode);
9459 if (struct_value_rtx)
9461 rtx value = gen_reg_rtx (Pmode);
9462 emit_move_insn (value,
9463 change_address (arguments, Pmode,
9464 plus_constant (XEXP (arguments, 0),
9465 size)));
9466 emit_move_insn (struct_value_rtx, value);
9467 if (GET_CODE (struct_value_rtx) == REG)
9468 use_reg (&call_fusage, struct_value_rtx);
9469 size += GET_MODE_SIZE (Pmode);
9472 /* All arguments and registers used for the call are set up by now! */
9473 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9475 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9476 and we don't want to load it into a register as an optimization,
9477 because prepare_call_address already did it if it should be done. */
9478 if (GET_CODE (function) != SYMBOL_REF)
9479 function = memory_address (FUNCTION_MODE, function);
9481 /* Generate the actual call instruction and save the return value. */
9482 #ifdef HAVE_untyped_call
9483 if (HAVE_untyped_call)
9484 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9485 result, result_vector (1, result)));
9486 else
9487 #endif
9488 #ifdef HAVE_call_value
9489 if (HAVE_call_value)
9491 rtx valreg = 0;
9493 /* Locate the unique return register. It is not possible to
9494 express a call that sets more than one return register using
9495 call_value; use untyped_call for that. In fact, untyped_call
9496 only needs to save the return registers in the given block. */
9497 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9498 if ((mode = apply_result_mode[regno]) != VOIDmode)
9500 if (valreg)
9501 abort (); /* HAVE_untyped_call required. */
9502 valreg = gen_rtx (REG, mode, regno);
9505 emit_call_insn (gen_call_value (valreg,
9506 gen_rtx (MEM, FUNCTION_MODE, function),
9507 const0_rtx, NULL_RTX, const0_rtx));
9509 emit_move_insn (change_address (result, GET_MODE (valreg),
9510 XEXP (result, 0)),
9511 valreg);
9513 else
9514 #endif
9515 abort ();
9517 /* Find the CALL insn we just emitted. */
9518 for (call_insn = get_last_insn ();
9519 call_insn && GET_CODE (call_insn) != CALL_INSN;
9520 call_insn = PREV_INSN (call_insn))
9523 if (! call_insn)
9524 abort ();
9526 /* Put the register usage information on the CALL. If there is already
9527 some usage information, put ours at the end. */
9528 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9530 rtx link;
9532 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9533 link = XEXP (link, 1))
9536 XEXP (link, 1) = call_fusage;
9538 else
9539 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9541 /* Restore the stack. */
9542 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9544 /* Return the address of the result block. */
9545 return copy_addr_to_reg (XEXP (result, 0));
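/* Schematically, the sequence emitted above amounts to

     old_sp = sp;                        (emit_stack_save)
     dest = push_block (argsize);        (fresh outgoing argument block)
     memcpy (dest, incoming_args, argsize);
     <reload each argument register from the ARGUMENTS block>
     <call FUNCTION, capturing the return registers into RESULT>
     sp = old_sp;                        (emit_stack_restore)

   and the address of RESULT is what __builtin_apply hands back.  */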
9548 /* Perform an untyped return. */
9550 static void
9551 expand_builtin_return (result)
9552 rtx result;
9554 int size, align, regno;
9555 enum machine_mode mode;
9556 rtx reg;
9557 rtx call_fusage = 0;
9559 apply_result_size ();
9560 result = gen_rtx (MEM, BLKmode, result);
9562 #ifdef HAVE_untyped_return
9563 if (HAVE_untyped_return)
9565 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9566 emit_barrier ();
9567 return;
9569 #endif
9571 /* Restore the return value and note that each value is used. */
9572 size = 0;
9573 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9574 if ((mode = apply_result_mode[regno]) != VOIDmode)
9576 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9577 if (size % align != 0)
9578 size = CEIL (size, align) * align;
9579 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9580 emit_move_insn (reg,
9581 change_address (result, mode,
9582 plus_constant (XEXP (result, 0),
9583 size)));
9585 push_to_sequence (call_fusage);
9586 emit_insn (gen_rtx (USE, VOIDmode, reg));
9587 call_fusage = get_insns ();
9588 end_sequence ();
9589 size += GET_MODE_SIZE (mode);
9592 /* Put the USE insns before the return. */
9593 emit_insns (call_fusage);
9595 /* Return whatever values were restored by jumping directly to the end
9596 of the function. */
9597 expand_null_return ();
9600 /* Expand code for a post- or pre- increment or decrement
9601 and return the RTX for the result.
9602 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9604 static rtx
9605 expand_increment (exp, post, ignore)
9606 register tree exp;
9607 int post, ignore;
9609 register rtx op0, op1;
9610 register rtx temp, value;
9611 register tree incremented = TREE_OPERAND (exp, 0);
9612 optab this_optab = add_optab;
9613 int icode;
9614 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9615 int op0_is_copy = 0;
9616 int single_insn = 0;
9617 /* 1 means we can't store into OP0 directly,
9618 because it is a subreg narrower than a word,
9619 and we don't dare clobber the rest of the word. */
9620 int bad_subreg = 0;
9622 if (output_bytecode)
9624 bc_expand_expr (exp);
9625 return NULL_RTX;
9628 /* Stabilize any component ref that might need to be
9629 evaluated more than once below. */
9630 if (!post
9631 || TREE_CODE (incremented) == BIT_FIELD_REF
9632 || (TREE_CODE (incremented) == COMPONENT_REF
9633 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9634 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9635 incremented = stabilize_reference (incremented);
9636 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9637 ones into save exprs so that they don't accidentally get evaluated
9638 more than once by the code below. */
9639 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9640 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9641 incremented = save_expr (incremented);
9643 /* Compute the operands as RTX.
9644 Note whether OP0 is the actual lvalue or a copy of it:
9645 I believe it is a copy iff it is a register or subreg
9646 and insns were generated in computing it. */
9648 temp = get_last_insn ();
9649 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9651 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9652 in place but instead must do sign- or zero-extension during assignment,
9653 so we copy it into a new register and let the code below use it as
9654 a copy.
9656 Note that we can safely modify this SUBREG since it is known not to be
9657 shared (it was made by the expand_expr call above). */
9659 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9661 if (post)
9662 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9663 else
9664 bad_subreg = 1;
9666 else if (GET_CODE (op0) == SUBREG
9667 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9669 /* We cannot increment this SUBREG in place. If we are
9670 post-incrementing, get a copy of the old value. Otherwise,
9671 just mark that we cannot increment in place. */
9672 if (post)
9673 op0 = copy_to_reg (op0);
9674 else
9675 bad_subreg = 1;
9678 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9679 && temp != get_last_insn ());
9680 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9682 /* Decide whether incrementing or decrementing. */
9683 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9684 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9685 this_optab = sub_optab;
9687 /* Convert decrement by a constant into a negative increment. */
9688 if (this_optab == sub_optab
9689 && GET_CODE (op1) == CONST_INT)
9691 op1 = GEN_INT (- INTVAL (op1));
9692 this_optab = add_optab;
9695 /* For a preincrement, see if we can do this with a single instruction. */
9696 if (!post)
9698 icode = (int) this_optab->handlers[(int) mode].insn_code;
9699 if (icode != (int) CODE_FOR_nothing
9700 /* Make sure that OP0 is valid for operands 0 and 1
9701 of the insn we want to queue. */
9702 && (*insn_operand_predicate[icode][0]) (op0, mode)
9703 && (*insn_operand_predicate[icode][1]) (op0, mode)
9704 && (*insn_operand_predicate[icode][2]) (op1, mode))
9705 single_insn = 1;
9708 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9709 then we cannot just increment OP0. We must therefore contrive to
9710 increment the original value. Then, for postincrement, we can return
9711 OP0 since it is a copy of the old value. For preincrement, expand here
9712 unless we can do it with a single insn.
9714 Likewise if storing directly into OP0 would clobber high bits
9715 we need to preserve (bad_subreg). */
9716 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9718 /* This is the easiest way to increment the value wherever it is.
9719 Problems with multiple evaluation of INCREMENTED are prevented
9720 because either (1) it is a component_ref or preincrement,
9721 in which case it was stabilized above, or (2) it is an array_ref
9722 with constant index in an array in a register, which is
9723 safe to reevaluate. */
9724 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9725 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9726 ? MINUS_EXPR : PLUS_EXPR),
9727 TREE_TYPE (exp),
9728 incremented,
9729 TREE_OPERAND (exp, 1));
9731 while (TREE_CODE (incremented) == NOP_EXPR
9732 || TREE_CODE (incremented) == CONVERT_EXPR)
9734 newexp = convert (TREE_TYPE (incremented), newexp);
9735 incremented = TREE_OPERAND (incremented, 0);
9738 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9739 return post ? op0 : temp;
9742 if (post)
9744 /* We have a true reference to the value in OP0.
9745 If there is an insn to add or subtract in this mode, queue it.
9746 Queueing the increment insn avoids the register shuffling
9747 that often results if we must increment now and first save
9748 the old value for subsequent use. */
9750 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9751 op0 = stabilize (op0);
9752 #endif
9754 icode = (int) this_optab->handlers[(int) mode].insn_code;
9755 if (icode != (int) CODE_FOR_nothing
9756 /* Make sure that OP0 is valid for operands 0 and 1
9757 of the insn we want to queue. */
9758 && (*insn_operand_predicate[icode][0]) (op0, mode)
9759 && (*insn_operand_predicate[icode][1]) (op0, mode))
9761 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9762 op1 = force_reg (mode, op1);
9764 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9766 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9768 rtx addr = force_reg (Pmode, XEXP (op0, 0));
9769 rtx temp, result;
9771 op0 = change_address (op0, VOIDmode, addr);
9772 temp = force_reg (GET_MODE (op0), op0);
9773 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9774 op1 = force_reg (mode, op1);
9776 /* The increment queue is LIFO, thus we have to `queue'
9777 the instructions in reverse order. */
9778 enqueue_insn (op0, gen_move_insn (op0, temp));
9779 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9780 return result;
9784 /* Preincrement, or we can't increment with one simple insn. */
9785 if (post)
9786 /* Save a copy of the value before inc or dec, to return it later. */
9787 temp = value = copy_to_reg (op0);
9788 else
9789 /* Arrange to return the incremented value. */
9790 /* Copy the rtx because expand_binop will protect from the queue,
9791 and the results of that would be invalid for us to return
9792 if our caller does emit_queue before using our result. */
9793 temp = copy_rtx (value = op0);
9795 /* Increment however we can. */
9796 op1 = expand_binop (mode, this_optab, value, op1, op0,
9797 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9798 /* Make sure the value is stored into OP0. */
9799 if (op1 != op0)
9800 emit_move_insn (op0, op1);
9802 return temp;
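/* Concrete shapes of the code emitted above, assuming the mode has a
   direct add pattern:

     y = x++;   =>   y <- x          (old value copied and returned)
                     x <- x + 1      (queued; runs at emit_queue time)

     y = ++x;   =>   x <- x + 1      (the single_insn case)
                     y <- x
*/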
9805 /* Expand all function calls contained within EXP, innermost ones first.
9806 But don't look within expressions that have sequence points.
9807 For each CALL_EXPR, record the rtx for its value
9808 in the CALL_EXPR_RTL field. */
9810 static void
9811 preexpand_calls (exp)
9812 tree exp;
9814 register int nops, i;
9815 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9817 if (! do_preexpand_calls)
9818 return;
9820 /* Only expressions and references can contain calls. */
9822 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9823 return;
9825 switch (TREE_CODE (exp))
9827 case CALL_EXPR:
9828 /* Do nothing if already expanded. */
9829 if (CALL_EXPR_RTL (exp) != 0
9830 /* Do nothing if the call returns a variable-sized object. */
9831 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9832 /* Do nothing to built-in functions. */
9833 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9834 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9835 == FUNCTION_DECL)
9836 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9837 return;
9839 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9840 return;
9842 case COMPOUND_EXPR:
9843 case COND_EXPR:
9844 case TRUTH_ANDIF_EXPR:
9845 case TRUTH_ORIF_EXPR:
9846 /* If we find one of these, then we can be sure
9847 the adjust will be done for it (since it makes jumps).
9848 Do it now, so that if this is inside an argument
9849 of a function, we don't get the stack adjustment
9850 after some other args have already been pushed. */
9851 do_pending_stack_adjust ();
9852 return;
9854 case BLOCK:
9855 case RTL_EXPR:
9856 case WITH_CLEANUP_EXPR:
9857 case CLEANUP_POINT_EXPR:
9858 return;
9860 case SAVE_EXPR:
9861 if (SAVE_EXPR_RTL (exp) != 0)
9862 return;
9865 nops = tree_code_length[(int) TREE_CODE (exp)];
9866 for (i = 0; i < nops; i++)
9867 if (TREE_OPERAND (exp, i) != 0)
9869 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9870 if (type == 'e' || type == '<' || type == '1' || type == '2'
9871 || type == 'r')
9872 preexpand_calls (TREE_OPERAND (exp, i));
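/* E.g. in `f (g (), h ())' the CALL_EXPRs for g and h are expanded
   here first; their values are cached in CALL_EXPR_RTL, so expanding
   f's arguments later cannot interleave g's and h's argument pushes
   with f's own.  */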
9876 /* At the start of a function, record that we have no previously-pushed
9877 arguments waiting to be popped. */
9879 void
9880 init_pending_stack_adjust ()
9882 pending_stack_adjust = 0;
9885 /* When exiting from function, if safe, clear out any pending stack adjust
9886 so the adjustment won't get done. */
9888 void
9889 clear_pending_stack_adjust ()
9891 #ifdef EXIT_IGNORE_STACK
9892 if (optimize > 0
9893 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9894 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9895 && ! flag_inline_functions)
9896 pending_stack_adjust = 0;
9897 #endif
9900 /* Pop any previously-pushed arguments that have not been popped yet. */
9902 void
9903 do_pending_stack_adjust ()
9905 if (inhibit_defer_pop == 0)
9907 if (pending_stack_adjust != 0)
9908 adjust_stack (GEN_INT (pending_stack_adjust));
9909 pending_stack_adjust = 0;
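/* Illustrative example of the deferral: after `f (1); g (2);' each
   call leaves one argument word on the stack; rather than popping
   after each call we accumulate pending_stack_adjust (8 bytes with
   4-byte words) and emit a single stack adjustment here.  */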
9913 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
9914 Returns the cleanups to be performed. */
9916 static tree
9917 defer_cleanups_to (old_cleanups)
9918 tree old_cleanups;
9920 tree new_cleanups = NULL_TREE;
9921 tree cleanups = cleanups_this_call;
9922 tree last = NULL_TREE;
9924 while (cleanups_this_call != old_cleanups)
9926 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9927 last = cleanups_this_call;
9928 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9931 if (last)
9933 /* Remove the list from the chain of cleanups. */
9934 TREE_CHAIN (last) = NULL_TREE;
9936 /* Reverse them so that we can build them in the right order. */
9937 cleanups = nreverse (cleanups);
9939 /* All cleanups must be on the function_obstack. */
9940 push_obstacks_nochange ();
9941 resume_temporary_allocation ();
9943 while (cleanups)
9945 if (new_cleanups)
9946 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9947 TREE_VALUE (cleanups), new_cleanups);
9948 else
9949 new_cleanups = TREE_VALUE (cleanups);
9951 cleanups = TREE_CHAIN (cleanups);
9954 pop_obstacks ();
9957 return new_cleanups;
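/* Worked example: with cleanups pushed in the order d1, d2, d3, the
   loop above builds the tree (d3, (d2, d1)) out of nested
   COMPOUND_EXPRs, so when the result is finally expanded the cleanups
   still run newest-first, the usual destructor order.  */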
9960 /* Expand all cleanups up to OLD_CLEANUPS.
9961 Needed here, and also for language-dependent calls. */
9963 void
9964 expand_cleanups_to (old_cleanups)
9965 tree old_cleanups;
9967 while (cleanups_this_call != old_cleanups)
9969 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9970 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9971 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9975 /* Expand conditional expressions. */
9977 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9978 LABEL is an rtx of code CODE_LABEL, in this function and all the
9979 functions here. */
9981 void
9982 jumpifnot (exp, label)
9983 tree exp;
9984 rtx label;
9986 do_jump (exp, label, NULL_RTX);
9989 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9991 void
9992 jumpif (exp, label)
9993 tree exp;
9994 rtx label;
9996 do_jump (exp, NULL_RTX, label);
9999 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10000 the result is zero, or IF_TRUE_LABEL if the result is one.
10001 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10002 meaning fall through in that case.
10004 do_jump always does any pending stack adjust except when it does not
10005 actually perform a jump. An example where there is no jump
10006 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
10008 This function is responsible for optimizing cases such as
10009 &&, || and comparison operators in EXP. */
10011 void
10012 do_jump (exp, if_false_label, if_true_label)
10013 tree exp;
10014 rtx if_false_label, if_true_label;
10016 register enum tree_code code = TREE_CODE (exp);
10017 /* Some cases need to create a label to jump to
10018 in order to properly fall through.
10019 These cases set DROP_THROUGH_LABEL nonzero. */
10020 rtx drop_through_label = 0;
10021 rtx temp;
10022 rtx comparison = 0;
10023 int i;
10024 tree type;
10025 enum machine_mode mode;
10027 emit_queue ();
10029 switch (code)
10031 case ERROR_MARK:
10032 break;
10034 case INTEGER_CST:
10035 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10036 if (temp)
10037 emit_jump (temp);
10038 break;
10040 #if 0
10041 /* This is not true with #pragma weak */
10042 case ADDR_EXPR:
10043 /* The address of something can never be zero. */
10044 if (if_true_label)
10045 emit_jump (if_true_label);
10046 break;
10047 #endif
10049 case NOP_EXPR:
10050 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10051 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10052 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10053 goto normal;
10054 case CONVERT_EXPR:
10055 /* If we are narrowing the operand, we have to do the compare in the
10056 narrower mode. */
10057 if ((TYPE_PRECISION (TREE_TYPE (exp))
10058 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10059 goto normal;
10060 case NON_LVALUE_EXPR:
10061 case REFERENCE_EXPR:
10062 case ABS_EXPR:
10063 case NEGATE_EXPR:
10064 case LROTATE_EXPR:
10065 case RROTATE_EXPR:
10066 /* These cannot change zero->non-zero or vice versa. */
10067 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10068 break;
10070 #if 0
10071 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
10072 a test and can be longer if the test is eliminated. */
10073 case PLUS_EXPR:
10074 /* Reduce to minus. */
10075 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10076 TREE_OPERAND (exp, 0),
10077 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10078 TREE_OPERAND (exp, 1))));
10079 /* Process as MINUS. */
10080 #endif
10082 case MINUS_EXPR:
10083 /* Non-zero iff operands of minus differ. */
10084 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10085 TREE_OPERAND (exp, 0),
10086 TREE_OPERAND (exp, 1)),
10087 NE, NE);
10088 break;
10090 case BIT_AND_EXPR:
10091 /* If we are AND'ing with a small constant, do this comparison in the
10092 smallest type that fits. If the machine doesn't have comparisons
10093 that small, it will be converted back to the wider comparison.
10094 This helps if we are testing the sign bit of a narrower object.
10095 combine can't do this for us because it can't know whether a
10096 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
10098 if (! SLOW_BYTE_ACCESS
10099 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10100 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10101 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10102 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10103 && (type = type_for_mode (mode, 1)) != 0
10104 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10105 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10106 != CODE_FOR_nothing))
10108 do_jump (convert (type, exp), if_false_label, if_true_label);
10109 break;
10111 goto normal;
10113 case TRUTH_NOT_EXPR:
10114 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10115 break;
10117 case TRUTH_ANDIF_EXPR:
10119 rtx seq1, seq2;
10120 tree cleanups, old_cleanups;
10122 if (if_false_label == 0)
10123 if_false_label = drop_through_label = gen_label_rtx ();
10124 start_sequence ();
10125 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10126 seq1 = get_insns ();
10127 end_sequence ();
10129 old_cleanups = cleanups_this_call;
10130 start_sequence ();
10131 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10132 seq2 = get_insns ();
10133 cleanups = defer_cleanups_to (old_cleanups);
10134 end_sequence ();
10136 if (cleanups)
10138 rtx flag = gen_reg_rtx (word_mode);
10139 tree new_cleanups;
10140 tree cond;
10142 /* Flag cleanups as not needed. */
10143 emit_move_insn (flag, const0_rtx);
10144 emit_insns (seq1);
10146 /* Flag cleanups as needed. */
10147 emit_move_insn (flag, const1_rtx);
10148 emit_insns (seq2);
10150 /* All cleanups must be on the function_obstack. */
10151 push_obstacks_nochange ();
10152 resume_temporary_allocation ();
10154 /* Convert flag, which is an rtx, into a tree. */
10155 cond = make_node (RTL_EXPR);
10156 TREE_TYPE (cond) = integer_type_node;
10157 RTL_EXPR_RTL (cond) = flag;
10158 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10159 cond = save_expr (cond);
10161 new_cleanups = build (COND_EXPR, void_type_node,
10162 truthvalue_conversion (cond),
10163 cleanups, integer_zero_node);
10164 new_cleanups = fold (new_cleanups);
10166 pop_obstacks ();
10168 /* Now add in the conditionalized cleanups. */
10169 cleanups_this_call
10170 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10171 expand_eh_region_start ();
10173 else
10175 emit_insns (seq1);
10176 emit_insns (seq2);
10179 break;
10181 case TRUTH_ORIF_EXPR:
10183 rtx seq1, seq2;
10184 tree cleanups, old_cleanups;
10186 if (if_true_label == 0)
10187 if_true_label = drop_through_label = gen_label_rtx ();
10188 start_sequence ();
10189 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10190 seq1 = get_insns ();
10191 end_sequence ();
10193 old_cleanups = cleanups_this_call;
10194 start_sequence ();
10195 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10196 seq2 = get_insns ();
10197 cleanups = defer_cleanups_to (old_cleanups);
10198 end_sequence ();
10200 if (cleanups)
10202 rtx flag = gen_reg_rtx (word_mode);
10203 tree new_cleanups;
10204 tree cond;
10206 /* Flag cleanups as not needed. */
10207 emit_move_insn (flag, const0_rtx);
10208 emit_insns (seq1);
10210 /* Flag cleanups as needed. */
10211 emit_move_insn (flag, const1_rtx);
10212 emit_insns (seq2);
10214 /* All cleanups must be on the function_obstack. */
10215 push_obstacks_nochange ();
10216 resume_temporary_allocation ();
10218 /* Convert flag, which is an rtx, into a tree. */
10219 cond = make_node (RTL_EXPR);
10220 TREE_TYPE (cond) = integer_type_node;
10221 RTL_EXPR_RTL (cond) = flag;
10222 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10223 cond = save_expr (cond);
10225 new_cleanups = build (COND_EXPR, void_type_node,
10226 truthvalue_conversion (cond),
10227 cleanups, integer_zero_node);
10228 new_cleanups = fold (new_cleanups);
10230 pop_obstacks ();
10232 /* Now add in the conditionalized cleanups. */
10233 cleanups_this_call
10234 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10235 expand_eh_region_start ();
10237 else
10239 emit_insns (seq1);
10240 emit_insns (seq2);
10243 break;
10245 case COMPOUND_EXPR:
10246 push_temp_slots ();
10247 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10248 preserve_temp_slots (NULL_RTX);
10249 free_temp_slots ();
10250 pop_temp_slots ();
10251 emit_queue ();
10252 do_pending_stack_adjust ();
10253 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10254 break;
10256 case COMPONENT_REF:
10257 case BIT_FIELD_REF:
10258 case ARRAY_REF:
10260 int bitsize, bitpos, unsignedp;
10261 enum machine_mode mode;
10262 tree type;
10263 tree offset;
10264 int volatilep = 0;
10265 int alignment;
10267 /* Get description of this reference. We don't actually care
10268 about the underlying object here. */
10269 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10270 &mode, &unsignedp, &volatilep,
10271 &alignment);
10273 type = type_for_size (bitsize, unsignedp);
10274 if (! SLOW_BYTE_ACCESS
10275 && type != 0 && bitsize >= 0
10276 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10277 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10278 != CODE_FOR_nothing))
10280 do_jump (convert (type, exp), if_false_label, if_true_label);
10281 break;
10283 goto normal;
10286 case COND_EXPR:
10287 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10288 if (integer_onep (TREE_OPERAND (exp, 1))
10289 && integer_zerop (TREE_OPERAND (exp, 2)))
10290 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10292 else if (integer_zerop (TREE_OPERAND (exp, 1))
10293 && integer_onep (TREE_OPERAND (exp, 2)))
10294 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10296 else
10298 rtx seq1, seq2;
10299 tree cleanups_left_side, cleanups_right_side, old_cleanups;
10301 register rtx label1 = gen_label_rtx ();
10302 drop_through_label = gen_label_rtx ();
10304 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10306 /* We need to save the cleanups for the lhs and rhs separately.
10307 Keep track of the cleanups seen before the lhs. */
10308 old_cleanups = cleanups_this_call;
10309 start_sequence ();
10310 /* Now the THEN-expression. */
10311 do_jump (TREE_OPERAND (exp, 1),
10312 if_false_label ? if_false_label : drop_through_label,
10313 if_true_label ? if_true_label : drop_through_label);
10314 /* In case the do_jump just above never jumps. */
10315 do_pending_stack_adjust ();
10316 emit_label (label1);
10317 seq1 = get_insns ();
10318 /* Now grab the cleanups for the lhs. */
10319 cleanups_left_side = defer_cleanups_to (old_cleanups);
10320 end_sequence ();
10322 /* And keep track of where we start before the rhs. */
10323 old_cleanups = cleanups_this_call;
10324 start_sequence ();
10325 /* Now the ELSE-expression. */
10326 do_jump (TREE_OPERAND (exp, 2),
10327 if_false_label ? if_false_label : drop_through_label,
10328 if_true_label ? if_true_label : drop_through_label);
10329 seq2 = get_insns ();
10330 /* Grab the cleanups for the rhs. */
10331 cleanups_right_side = defer_cleanups_to (old_cleanups);
10332 end_sequence ();
10334 if (cleanups_left_side || cleanups_right_side)
10336 /* Make the cleanups for the THEN and ELSE clauses
10337 conditional based on which half is executed. */
10338 rtx flag = gen_reg_rtx (word_mode);
10339 tree new_cleanups;
10340 tree cond;
10342 /* Set the flag to 0 so that we know we executed the lhs. */
10343 emit_move_insn (flag, const0_rtx);
10344 emit_insns (seq1);
10346 /* Set the flag to 1 so that we know we executed the rhs. */
10347 emit_move_insn (flag, const1_rtx);
10348 emit_insns (seq2);
10350 /* Make sure the cleanup lives on the function_obstack. */
10351 push_obstacks_nochange ();
10352 resume_temporary_allocation ();
10354 /* Now, build up a COND_EXPR that tests the value of the
10355 flag, and then either do the cleanups for the lhs or the
10356 rhs. */
10357 cond = make_node (RTL_EXPR);
10358 TREE_TYPE (cond) = integer_type_node;
10359 RTL_EXPR_RTL (cond) = flag;
10360 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10361 cond = save_expr (cond);
10363 new_cleanups = build (COND_EXPR, void_type_node,
10364 truthvalue_conversion (cond),
10365 cleanups_right_side, cleanups_left_side);
10366 new_cleanups = fold (new_cleanups);
10368 pop_obstacks ();
10370 /* Now add in the conditionalized cleanups. */
10371 cleanups_this_call
10372 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10373 expand_eh_region_start ();
10375 else
10377 /* No cleanups were needed, so emit the two sequences
10378 directly. */
10379 emit_insns (seq1);
10380 emit_insns (seq2);
10383 break;
10385 case EQ_EXPR:
10387 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10389 if (integer_zerop (TREE_OPERAND (exp, 1)))
10390 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10391 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10392 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10393 do_jump
10394 (fold
10395 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10396 fold (build (EQ_EXPR, TREE_TYPE (exp),
10397 fold (build1 (REALPART_EXPR,
10398 TREE_TYPE (inner_type),
10399 TREE_OPERAND (exp, 0))),
10400 fold (build1 (REALPART_EXPR,
10401 TREE_TYPE (inner_type),
10402 TREE_OPERAND (exp, 1))))),
10403 fold (build (EQ_EXPR, TREE_TYPE (exp),
10404 fold (build1 (IMAGPART_EXPR,
10405 TREE_TYPE (inner_type),
10406 TREE_OPERAND (exp, 0))),
10407 fold (build1 (IMAGPART_EXPR,
10408 TREE_TYPE (inner_type),
10409 TREE_OPERAND (exp, 1))))))),
10410 if_false_label, if_true_label);
10411 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10412 && !can_compare_p (TYPE_MODE (inner_type)))
10413 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10414 else
10415 comparison = compare (exp, EQ, EQ);
10416 break;
10419 case NE_EXPR:
10421 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10423 if (integer_zerop (TREE_OPERAND (exp, 1)))
10424 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10425 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10426 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10427 do_jump
10428 (fold
10429 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10430 fold (build (NE_EXPR, TREE_TYPE (exp),
10431 fold (build1 (REALPART_EXPR,
10432 TREE_TYPE (inner_type),
10433 TREE_OPERAND (exp, 0))),
10434 fold (build1 (REALPART_EXPR,
10435 TREE_TYPE (inner_type),
10436 TREE_OPERAND (exp, 1))))),
10437 fold (build (NE_EXPR, TREE_TYPE (exp),
10438 fold (build1 (IMAGPART_EXPR,
10439 TREE_TYPE (inner_type),
10440 TREE_OPERAND (exp, 0))),
10441 fold (build1 (IMAGPART_EXPR,
10442 TREE_TYPE (inner_type),
10443 TREE_OPERAND (exp, 1))))))),
10444 if_false_label, if_true_label);
10445 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10446 && !can_compare_p (TYPE_MODE (inner_type)))
10447 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10448 else
10449 comparison = compare (exp, NE, NE);
10450 break;
10453 case LT_EXPR:
10454 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10455 == MODE_INT)
10456 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10457 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10458 else
10459 comparison = compare (exp, LT, LTU);
10460 break;
10462 case LE_EXPR:
10463 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10464 == MODE_INT)
10465 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10466 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10467 else
10468 comparison = compare (exp, LE, LEU);
10469 break;
10471 case GT_EXPR:
10472 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10473 == MODE_INT)
10474 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10475 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10476 else
10477 comparison = compare (exp, GT, GTU);
10478 break;
10480 case GE_EXPR:
10481 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10482 == MODE_INT)
10483 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10484 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10485 else
10486 comparison = compare (exp, GE, GEU);
10487 break;
10489 default:
10490 normal:
10491 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10492 #if 0
10493 /* This is not needed any more and causes poor code since it causes
10494 comparisons and tests from non-SI objects to have different code
10495 sequences. */
10496 /* Copy to register to avoid generating bad insns by cse
10497 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10498 if (!cse_not_expected && GET_CODE (temp) == MEM)
10499 temp = copy_to_reg (temp);
10500 #endif
10501 do_pending_stack_adjust ();
10502 if (GET_CODE (temp) == CONST_INT)
10503 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10504 else if (GET_CODE (temp) == LABEL_REF)
10505 comparison = const_true_rtx;
10506 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10507 && !can_compare_p (GET_MODE (temp)))
10508 /* Note swapping the labels gives us not-equal. */
10509 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10510 else if (GET_MODE (temp) != VOIDmode)
10511 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10512 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10513 GET_MODE (temp), NULL_RTX, 0);
10514 else
10515 abort ();
10518 /* Do any postincrements in the expression that was tested. */
10519 emit_queue ();
10521 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10522 straight into a conditional jump instruction as the jump condition.
10523 Otherwise, all the work has been done already. */
10525 if (comparison == const_true_rtx)
10527 if (if_true_label)
10528 emit_jump (if_true_label);
10530 else if (comparison == const0_rtx)
10532 if (if_false_label)
10533 emit_jump (if_false_label);
10535 else if (comparison)
10536 do_jump_for_compare (comparison, if_false_label, if_true_label);
10538 if (drop_through_label)
10540 /* If do_jump produces code that might be jumped around,
10541 do any stack adjusts from that code, before the place
10542 where control merges in. */
10543 do_pending_stack_adjust ();
10544 emit_label (drop_through_label);
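/* Example of the TRUTH_ANDIF_EXPR and BIT_AND_EXPR cases above:

     if (a && b) stmt;    becomes    <jump to L if a == 0>
                                     <jump to L if b == 0>
                                     stmt
                                   L:

   and `if (c & 0x80)' on an 8-bit char is narrowed to a QImode test
   of the sign bit instead of widening C to int first.  */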
10548 /* Given a comparison expression EXP for values too wide to be compared
10549 with one insn, test the comparison and jump to the appropriate label.
10550 The code of EXP is ignored; we always test GT if SWAP is 0,
10551 and LT if SWAP is 1. */
10553 static void
10554 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10555 tree exp;
10556 int swap;
10557 rtx if_false_label, if_true_label;
10559 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10560 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10561 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10562 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10563 rtx drop_through_label = 0;
10564 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10565 int i;
10567 if (! if_true_label || ! if_false_label)
10568 drop_through_label = gen_label_rtx ();
10569 if (! if_true_label)
10570 if_true_label = drop_through_label;
10571 if (! if_false_label)
10572 if_false_label = drop_through_label;
10574 /* Compare a word at a time, high order first. */
10575 for (i = 0; i < nwords; i++)
10577 rtx comp;
10578 rtx op0_word, op1_word;
10580 if (WORDS_BIG_ENDIAN)
10582 op0_word = operand_subword_force (op0, i, mode);
10583 op1_word = operand_subword_force (op1, i, mode);
10585 else
10587 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10588 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10591 /* All but high-order word must be compared as unsigned. */
10592 comp = compare_from_rtx (op0_word, op1_word,
10593 (unsignedp || i > 0) ? GTU : GT,
10594 unsignedp, word_mode, NULL_RTX, 0);
10595 if (comp == const_true_rtx)
10596 emit_jump (if_true_label);
10597 else if (comp != const0_rtx)
10598 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10600 /* Consider lower words only if these are equal. */
10601 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10602 NULL_RTX, 0);
10603 if (comp == const_true_rtx)
10604 emit_jump (if_false_label);
10605 else if (comp != const0_rtx)
10606 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10609 if (if_false_label)
10610 emit_jump (if_false_label);
10611 if (drop_through_label)
10612 emit_label (drop_through_label);
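/* Worked example: signed DImode `a > b' on a 32-bit target expands to

     if (a.hi  >  b.hi)  goto if_true;    (signed compare, high word)
     if (a.hi  != b.hi)  goto if_false;   (highs differ, so a < b)
     if (a.lo  >u b.lo)  goto if_true;    (unsigned compare, low word)
   if_false: ...
*/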
10615 /* Compare OP0 with OP1, word at a time, in mode MODE.
10616 UNSIGNEDP says to do unsigned comparison.
10617 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
10619 void
10620 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10621 enum machine_mode mode;
10622 int unsignedp;
10623 rtx op0, op1;
10624 rtx if_false_label, if_true_label;
10626 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10627 rtx drop_through_label = 0;
10628 int i;
10630 if (! if_true_label || ! if_false_label)
10631 drop_through_label = gen_label_rtx ();
10632 if (! if_true_label)
10633 if_true_label = drop_through_label;
10634 if (! if_false_label)
10635 if_false_label = drop_through_label;
10637 /* Compare a word at a time, high order first. */
10638 for (i = 0; i < nwords; i++)
10640 rtx comp;
10641 rtx op0_word, op1_word;
10643 if (WORDS_BIG_ENDIAN)
10645 op0_word = operand_subword_force (op0, i, mode);
10646 op1_word = operand_subword_force (op1, i, mode);
10648 else
10650 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10651 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10654 /* All but high-order word must be compared as unsigned. */
10655 comp = compare_from_rtx (op0_word, op1_word,
10656 (unsignedp || i > 0) ? GTU : GT,
10657 unsignedp, word_mode, NULL_RTX, 0);
10658 if (comp == const_true_rtx)
10659 emit_jump (if_true_label);
10660 else if (comp != const0_rtx)
10661 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10663 /* Consider lower words only if these are equal. */
10664 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10665 NULL_RTX, 0);
10666 if (comp == const_true_rtx)
10667 emit_jump (if_false_label);
10668 else if (comp != const0_rtx)
10669 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10672 if (if_false_label)
10673 emit_jump (if_false_label);
10674 if (drop_through_label)
10675 emit_label (drop_through_label);
10678 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10679 with one insn, test the comparison and jump to the appropriate label. */
10681 static void
10682 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10683 tree exp;
10684 rtx if_false_label, if_true_label;
10686 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10687 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10688 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10689 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10690 int i;
10691 rtx drop_through_label = 0;
10693 if (! if_false_label)
10694 drop_through_label = if_false_label = gen_label_rtx ();
10696 for (i = 0; i < nwords; i++)
10698 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10699 operand_subword_force (op1, i, mode),
10700 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10701 word_mode, NULL_RTX, 0);
10702 if (comp == const_true_rtx)
10703 emit_jump (if_false_label);
10704 else if (comp != const0_rtx)
10705 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10708 if (if_true_label)
10709 emit_jump (if_true_label);
10710 if (drop_through_label)
10711 emit_label (drop_through_label);
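/* Worked example: DImode `a == b' on a 32-bit target expands to

     if (a.lo != b.lo)  goto if_false;
     if (a.hi != b.hi)  goto if_false;
     goto if_true;

   with IF_FALSE_LABEL standing in as the drop-through label when the
   caller supplied none.  */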
10714 /* Jump according to whether OP0 is 0.
10715 We assume that OP0 has an integer mode that is too wide
10716 for the available compare insns. */
10718 static void
10719 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10720 rtx op0;
10721 rtx if_false_label, if_true_label;
10723 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10724 int i;
10725 rtx drop_through_label = 0;
10727 if (! if_false_label)
10728 drop_through_label = if_false_label = gen_label_rtx ();
10730 for (i = 0; i < nwords; i++)
10732 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10733 GET_MODE (op0)),
10734 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10735 if (comp == const_true_rtx)
10736 emit_jump (if_false_label);
10737 else if (comp != const0_rtx)
10738 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10741 if (if_true_label)
10742 emit_jump (if_true_label);
10743 if (drop_through_label)
10744 emit_label (drop_through_label);
10747 /* Given a comparison expression in rtl form, output conditional branches to
10748 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
10750 static void
10751 do_jump_for_compare (comparison, if_false_label, if_true_label)
10752 rtx comparison, if_false_label, if_true_label;
10754 if (if_true_label)
10756 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10757 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10758 else
10759 abort ();
10761 if (if_false_label)
10762 emit_jump (if_false_label);
10764 else if (if_false_label)
10766 rtx insn;
10767 rtx prev = get_last_insn ();
10768 rtx branch = 0;
10770 /* Output the branch with the opposite condition. Then try to invert
10771 what is generated. If more than one insn is a branch, or if the
10772 branch is not the last insn written, abort. If we can't invert
10773 the branch, make a true label, redirect this jump to that,
10774 emit a jump to the false label and define the true label. */
10776 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10777 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10778 else
10779 abort ();
10781 /* Here we get the first insn that was just emitted. It used to be the
10782 case that, on some machines, emitting the branch would discard
10783 the previous compare insn and emit a replacement. This isn't
10784 done anymore, but abort if we see that PREV is deleted. */
10786 if (prev == 0)
10787 insn = get_insns ();
10788 else if (INSN_DELETED_P (prev))
10789 abort ();
10790 else
10791 insn = NEXT_INSN (prev);
10793 for (; insn; insn = NEXT_INSN (insn))
10794 if (GET_CODE (insn) == JUMP_INSN)
10796 if (branch)
10797 abort ();
10798 branch = insn;
10801 if (branch != get_last_insn ())
10802 abort ();
10804 JUMP_LABEL (branch) = if_false_label;
10805 if (! invert_jump (branch, if_false_label))
10807 if_true_label = gen_label_rtx ();
10808 redirect_jump (branch, if_true_label);
10809 emit_jump (if_false_label);
10810 emit_label (if_true_label);
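/* The invert_jump fallback above turns an uninvertible
   `branch-if-C to IF_FALSE' into

       branch-if-C  Ltrue
       jump         IF_FALSE
     Ltrue:

   which preserves the semantics at the cost of one extra jump.  */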
10815 /* Generate code for a comparison expression EXP
10816 (including code to compute the values to be compared)
10817 and set (CC0) according to the result.
10818 SIGNED_CODE should be the rtx operation for this comparison for
10819 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10821 We force a stack adjustment unless there are currently
10822 things pushed on the stack that aren't yet used. */
10824 static rtx
10825 compare (exp, signed_code, unsigned_code)
10826 register tree exp;
10827 enum rtx_code signed_code, unsigned_code;
10829 register rtx op0
10830 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10831 register rtx op1
10832 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10833 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10834 register enum machine_mode mode = TYPE_MODE (type);
10835 int unsignedp = TREE_UNSIGNED (type);
10836 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10838 #ifdef HAVE_canonicalize_funcptr_for_compare
10839 /* If function pointers need to be "canonicalized" before they can
10840 be reliably compared, then canonicalize them. */
10841 if (HAVE_canonicalize_funcptr_for_compare
10842 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10843 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10844 == FUNCTION_TYPE))
10846 rtx new_op0 = gen_reg_rtx (mode);
10848 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10849 op0 = new_op0;
10852 if (HAVE_canonicalize_funcptr_for_compare
10853 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10854 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10855 == FUNCTION_TYPE))
10857 rtx new_op1 = gen_reg_rtx (mode);
10859 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10860 op1 = new_op1;
10862 #endif
10864 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10865 ((mode == BLKmode)
10866 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10867 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10870 /* Like compare but expects the values to compare as two rtx's.
10871 The decision as to signed or unsigned comparison must be made by the caller.
10873 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10874 compared.
10876 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10877 size of MODE should be used. */
10879 rtx
10880 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10881 register rtx op0, op1;
10882 enum rtx_code code;
10883 int unsignedp;
10884 enum machine_mode mode;
10885 rtx size;
10886 int align;
10888 rtx tem;
10890 /* If one operand is constant, make it the second one. Only do this
10891 if the other operand is not constant as well. */
10893 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10894 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
10896 tem = op0;
10897 op0 = op1;
10898 op1 = tem;
10899 code = swap_condition (code);
10902 if (flag_force_mem)
10904 op0 = force_not_mem (op0);
10905 op1 = force_not_mem (op1);
10908 do_pending_stack_adjust ();
10910 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10911 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10912 return tem;
10914 #if 0
10915 /* There's no need to do this now that combine.c can eliminate lots of
10916 sign extensions. This can be less efficient in certain cases on other
10917 machines. */
10919 /* If this is a signed equality comparison, we can do it as an
10920 unsigned comparison since zero-extension is cheaper than sign
10921 extension and comparisons with zero are done as unsigned. This is
10922 the case even on machines that can do fast sign extension, since
10923 zero-extension is easier to combine with other operations than
10924 sign-extension is. If we are comparing against a constant, we must
10925 convert it to what it would look like unsigned. */
10926 if ((code == EQ || code == NE) && ! unsignedp
10927 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10929 if (GET_CODE (op1) == CONST_INT
10930 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10931 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10932 unsignedp = 1;
10934 #endif
10936 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10938 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
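/* Example of the canonicalization above: `3 < x' arrives as
   (lt 3 x); the constant is moved to the second operand and the code
   is swapped to GT, so the compare emitted is for (gt x 3).  */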
10941 /* Generate code to calculate EXP using a store-flag instruction
10942 and return an rtx for the result. EXP is either a comparison
10943 or a TRUTH_NOT_EXPR whose operand is a comparison.
10945 If TARGET is nonzero, store the result there if convenient.
10947 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10948 cheap.
10950 Return zero if there is no suitable set-flag instruction
10951 available on this machine.
10953 Once expand_expr has been called on the arguments of the comparison,
10954 we are committed to doing the store flag, since it is not safe to
10955 re-evaluate the expression. We emit the store-flag insn by calling
10956 emit_store_flag, but only expand the arguments if we have a reason
10957 to believe that emit_store_flag will be successful. If we think that
10958 it will, but it isn't, we have to simulate the store-flag with a
10959 set/jump/set sequence. */
10961 static rtx
10962 do_store_flag (exp, target, mode, only_cheap)
10963 tree exp;
10964 rtx target;
10965 enum machine_mode mode;
10966 int only_cheap;
10967 {
10968 enum rtx_code code;
10969 tree arg0, arg1, type;
10970 tree tem;
10971 enum machine_mode operand_mode;
10972 int invert = 0;
10973 int unsignedp;
10974 rtx op0, op1;
10975 enum insn_code icode;
10976 rtx subtarget = target;
10977 rtx result, label, pattern, jump_pat;
10979 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10980 result at the end. We can't simply invert the test since it would
10981 have already been inverted if it were valid. This case occurs for
10982 some floating-point comparisons. */
10984 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10985 invert = 1, exp = TREE_OPERAND (exp, 0);
10987 arg0 = TREE_OPERAND (exp, 0);
10988 arg1 = TREE_OPERAND (exp, 1);
10989 type = TREE_TYPE (arg0);
10990 operand_mode = TYPE_MODE (type);
10991 unsignedp = TREE_UNSIGNED (type);
10993 /* We won't bother with BLKmode store-flag operations because it would mean
10994 passing a lot of information to emit_store_flag. */
10995 if (operand_mode == BLKmode)
10996 return 0;
10998 /* We won't bother with store-flag operations involving function pointers
10999 when function pointers must be canonicalized before comparisons. */
11000 #ifdef HAVE_canonicalize_funcptr_for_compare
11001 if (HAVE_canonicalize_funcptr_for_compare
11002 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11003 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11004 == FUNCTION_TYPE))
11005 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11006 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11007 == FUNCTION_TYPE))))
11008 return 0;
11009 #endif
11011 STRIP_NOPS (arg0);
11012 STRIP_NOPS (arg1);
11014 /* Get the rtx comparison code to use. We know that EXP is a comparison
11015 operation of some type. Some comparisons against 1 and -1 can be
11016 converted to comparisons with zero. Do so here so that the tests
11017 below will be aware that we have a comparison with zero. These
11018 tests will not catch constants in the first operand, but constants
11019 are rarely passed as the first operand. */
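/* For example, "x < 1" becomes "x <= 0" and "x >= 1" becomes "x > 0",
   so only comparisons against zero need to be recognized below. */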
11021 switch (TREE_CODE (exp))
11022 {
11023 case EQ_EXPR:
11024 code = EQ;
11025 break;
11026 case NE_EXPR:
11027 code = NE;
11028 break;
11029 case LT_EXPR:
11030 if (integer_onep (arg1))
11031 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11032 else
11033 code = unsignedp ? LTU : LT;
11034 break;
11035 case LE_EXPR:
11036 if (! unsignedp && integer_all_onesp (arg1))
11037 arg1 = integer_zero_node, code = LT;
11038 else
11039 code = unsignedp ? LEU : LE;
11040 break;
11041 case GT_EXPR:
11042 if (! unsignedp && integer_all_onesp (arg1))
11043 arg1 = integer_zero_node, code = GE;
11044 else
11045 code = unsignedp ? GTU : GT;
11046 break;
11047 case GE_EXPR:
11048 if (integer_onep (arg1))
11049 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11050 else
11051 code = unsignedp ? GEU : GE;
11052 break;
11053 default:
11054 abort ();
11055 }
11057 /* Put a constant second. */
11058 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11059 {
11060 tem = arg0; arg0 = arg1; arg1 = tem;
11061 code = swap_condition (code);
11062 }
11064 /* If this is an equality or inequality test of a single bit, we can
11065 do this by shifting the bit being tested to the low-order bit and
11066 masking the result with the constant 1. If the condition was EQ,
11067 we xor it with 1. This does not require an scc insn and is faster
11068 than an scc insn even if we have it. */
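/* For instance, "(x & 8) != 0" turns into "(x >> 3) & 1", and
   "(x & 8) == 0" into the same shifted test XORed with 1. */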
11070 if ((code == NE || code == EQ)
11071 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
11072 && integer_pow2p (TREE_OPERAND (arg0, 1))
11073 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
11074 {
11075 tree inner = TREE_OPERAND (arg0, 0);
11076 HOST_WIDE_INT tem;
11077 int bitnum;
11078 int ops_unsignedp;
11080 tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
11081 NULL_RTX, VOIDmode, 0));
11082 /* In this case, immed_double_const will sign extend the value to make
11083 it look the same on the host and target. We must remove the
11084 sign-extension before calling exact_log2, since exact_log2 will
11085 fail for negative values. */
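/* For instance, with a 64-bit HOST_WIDE_INT and a 32-bit word, the mask
   for bit 31 reads back from INTVAL as 0xffffffff80000000; masking with
   GET_MODE_MASK (word_mode) recovers 0x80000000, for which exact_log2
   correctly returns 31. */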
11086 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
11087 && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
11088 /* We don't use the obvious constant shift to generate the mask,
11089 because that generates compiler warnings when BITS_PER_WORD is
11090 greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
11091 code is unreachable in that case. */
11092 tem = tem & GET_MODE_MASK (word_mode);
11093 bitnum = exact_log2 (tem);
11095 /* If INNER is a right shift of a constant and it plus BITNUM does
11096 not overflow, adjust BITNUM and INNER. */
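/* For example, "((x >> 2) & 4) != 0" tests bit 2 of "x >> 2", so BITNUM
   is adjusted to 4 and the test is done on x directly. */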
11098 if (TREE_CODE (inner) == RSHIFT_EXPR
11099 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
11100 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
11101 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
11102 < TYPE_PRECISION (type)))
11103 {
11104 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
11105 inner = TREE_OPERAND (inner, 0);
11106 }
11108 /* If we are going to be able to omit the AND below, we must do our
11109 operations as unsigned. If we must use the AND, we have a choice.
11110 Normally unsigned is faster, but for some machines signed is. */
11111 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
11112 #ifdef LOAD_EXTEND_OP
11113 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
11114 #else
11115 : 1
11116 #endif
11117 );
11119 if (subtarget == 0 || GET_CODE (subtarget) != REG
11120 || GET_MODE (subtarget) != operand_mode
11121 || ! safe_from_p (subtarget, inner))
11122 subtarget = 0;
11124 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
11126 if (bitnum != 0)
11127 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
11128 size_int (bitnum), subtarget, ops_unsignedp);
11130 if (GET_MODE (op0) != mode)
11131 op0 = convert_to_mode (mode, op0, ops_unsignedp);
11133 if ((code == EQ && ! invert) || (code == NE && invert))
11134 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
11135 ops_unsignedp, OPTAB_LIB_WIDEN);
11137 /* Put the AND last so it can combine with more things. */
11138 if (bitnum != TYPE_PRECISION (type) - 1)
11139 op0 = expand_and (op0, const1_rtx, subtarget);
11141 return op0;
11142 }
11144 /* Now see if we are likely to be able to do this. Return if not. */
11145 if (! can_compare_p (operand_mode))
11146 return 0;
11147 icode = setcc_gen_code[(int) code];
11148 if (icode == CODE_FOR_nothing
11149 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
11150 {
11151 /* We can only do this if it is one of the special cases that
11152 can be handled without an scc insn. */
11153 if ((code == LT && integer_zerop (arg1))
11154 || (! only_cheap && code == GE && integer_zerop (arg1)))
11155 ;
11156 else if (BRANCH_COST >= 0
11157 && ! only_cheap && (code == NE || code == EQ)
11158 && TREE_CODE (type) != REAL_TYPE
11159 && ((abs_optab->handlers[(int) operand_mode].insn_code
11160 != CODE_FOR_nothing)
11161 || (ffs_optab->handlers[(int) operand_mode].insn_code
11162 != CODE_FOR_nothing)))
11163 ;
11164 else
11165 return 0;
11166 }
11168 preexpand_calls (exp);
11169 if (subtarget == 0 || GET_CODE (subtarget) != REG
11170 || GET_MODE (subtarget) != operand_mode
11171 || ! safe_from_p (subtarget, arg1))
11172 subtarget = 0;
11174 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
11175 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11177 if (target == 0)
11178 target = gen_reg_rtx (mode);
11180 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
11181 because, if the emit_store_flag does anything it will succeed and
11182 OP0 and OP1 will not be used subsequently. */
11184 result = emit_store_flag (target, code,
11185 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
11186 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
11187 operand_mode, unsignedp, 1);
11189 if (result)
11190 {
11191 if (invert)
11192 result = expand_binop (mode, xor_optab, result, const1_rtx,
11193 result, 0, OPTAB_LIB_WIDEN);
11194 return result;
11195 }
11197 /* If this failed, we have to do this with set/compare/jump/set code. */
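/* The sequence emitted below is, roughly:
       target = 1;  if (op0 CODE op1) goto label;  target = 0;  label:
   with the two constants exchanged when INVERT is set. */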
11198 if (GET_CODE (target) != REG
11199 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
11200 target = gen_reg_rtx (GET_MODE (target));
11202 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
11203 result = compare_from_rtx (op0, op1, code, unsignedp,
11204 operand_mode, NULL_RTX, 0);
11205 if (GET_CODE (result) == CONST_INT)
11206 return (((result == const0_rtx && ! invert)
11207 || (result != const0_rtx && invert))
11208 ? const0_rtx : const1_rtx);
11210 label = gen_label_rtx ();
11211 if (bcc_gen_fctn[(int) code] == 0)
11212 abort ();
11214 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
11215 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
11216 emit_label (label);
11218 return target;
11219 }
11221 /* Generate a tablejump instruction (used for switch statements). */
11223 #ifdef HAVE_tablejump
11225 /* INDEX is the value being switched on, with the lowest value
11226 in the table already subtracted.
11227 MODE is its expected mode (needed if INDEX is constant).
11228 RANGE is the length of the jump table.
11229 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11231 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11232 index value is out of range. */
11234 void
11235 do_tablejump (index, mode, range, table_label, default_label)
11236 rtx index, range, table_label, default_label;
11237 enum machine_mode mode;
11238 {
11239 register rtx temp, vector;
11241 /* Do an unsigned comparison (in the proper mode) between the index
11242 expression and the value which represents the length of the range.
11243 Since we just finished subtracting the lower bound of the range
11244 from the index expression, this comparison allows us to simultaneously
11245 check that the original index expression value is both greater than
11246 or equal to the minimum value of the range and less than or equal to
11247 the maximum value of the range. */
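/* In C terms this is the classic one-comparison range check: with the
   low bound already subtracted from INDEX,
       if ((unsigned) index > (unsigned) range) goto default_label;
   rejects both index < low and index > high in a single unsigned test. */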
11249 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
11250 emit_jump_insn (gen_bgtu (default_label));
11252 /* If index is in range, it must fit in Pmode.
11253 Convert to Pmode so we can index with it. */
11254 if (mode != Pmode)
11255 index = convert_to_mode (Pmode, index, 1);
11257 /* Don't let a MEM slip through, because then the INDEX that comes
11258 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11259 and break_out_memory_refs will go to work on it and mess it up. */
11260 #ifdef PIC_CASE_VECTOR_ADDRESS
11261 if (flag_pic && GET_CODE (index) != REG)
11262 index = copy_to_mode_reg (Pmode, index);
11263 #endif
11265 /* If flag_force_addr were to affect this address
11266 it could interfere with the tricky assumptions made
11267 about addresses that contain label-refs,
11268 which may be valid only very near the tablejump itself. */
11269 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11270 GET_MODE_SIZE, because this indicates how large insns are. The other
11271 uses should all be Pmode, because they are addresses. This code
11272 could fail if addresses and insns are not the same size. */
11273 index = gen_rtx (PLUS, Pmode,
11274 gen_rtx (MULT, Pmode, index,
11275 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11276 gen_rtx (LABEL_REF, Pmode, table_label));
11277 #ifdef PIC_CASE_VECTOR_ADDRESS
11278 if (flag_pic)
11279 index = PIC_CASE_VECTOR_ADDRESS (index);
11280 else
11281 #endif
11282 index = memory_address_noforce (CASE_VECTOR_MODE, index);
11283 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11284 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11285 RTX_UNCHANGING_P (vector) = 1;
11286 convert_move (temp, vector, 0);
11288 emit_jump_insn (gen_tablejump (temp, table_label));
11290 #ifndef CASE_VECTOR_PC_RELATIVE
11291 /* If we are generating PIC code or if the table is PC-relative, the
11292 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11293 if (! flag_pic)
11294 emit_barrier ();
11295 #endif
11296 }
11298 #endif /* HAVE_tablejump */
11301 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11302 to that value is on the top of the stack. The resulting type is TYPE, and
11303 the source declaration is DECL. */
11305 void
11306 bc_load_memory (type, decl)
11307 tree type, decl;
11308 {
11309 enum bytecode_opcode opcode;
11312 /* Bit fields are special. We only know about signed and
11313 unsigned ints, and enums. The latter are treated as
11314 signed integers. */
11316 if (DECL_BIT_FIELD (decl))
11317 if (TREE_CODE (type) == ENUMERAL_TYPE
11318 || TREE_CODE (type) == INTEGER_TYPE)
11319 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11320 else
11321 abort ();
11322 else
11323 /* See corresponding comment in bc_store_memory(). */
11324 if (TYPE_MODE (type) == BLKmode
11325 || TYPE_MODE (type) == VOIDmode)
11326 return;
11327 else
11328 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
11330 if (opcode == neverneverland)
11331 abort ();
11333 bc_emit_bytecode (opcode);
11335 #ifdef DEBUG_PRINT_CODE
11336 fputc ('\n', stderr);
11337 #endif
11338 }
11341 /* Store the contents of the second stack slot to the address in the
11342 top stack slot. DECL is the declaration of the destination and is used
11343 to determine whether we're dealing with a bitfield. */
11345 void
11346 bc_store_memory (type, decl)
11347 tree type, decl;
11348 {
11349 enum bytecode_opcode opcode;
11352 if (DECL_BIT_FIELD (decl))
11353 {
11354 if (TREE_CODE (type) == ENUMERAL_TYPE
11355 || TREE_CODE (type) == INTEGER_TYPE)
11356 opcode = sstoreBI;
11357 else
11358 abort ();
11359 }
11360 else
11361 if (TYPE_MODE (type) == BLKmode)
11362 {
11363 /* Copy structure. This expands to a block copy instruction, storeBLK.
11364 In addition to the arguments expected by the other store instructions,
11365 it also expects a type size (SImode) on top of the stack, which is the
11366 structure size in size units (usually bytes). The first two arguments
11367 are already on the stack, so we just put the size on level 1. For some
11368 other languages the size may be variable; this is why we don't encode
11369 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
11371 bc_expand_expr (TYPE_SIZE (type));
11372 opcode = storeBLK;
11373 }
11374 else
11375 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11377 if (opcode == neverneverland)
11378 abort ();
11380 bc_emit_bytecode (opcode);
11382 #ifdef DEBUG_PRINT_CODE
11383 fputc ('\n', stderr);
11384 #endif
11385 }
11388 /* Allocate local stack space sufficient to hold a value of the given
11389 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11390 integral power of 2. A special case is locals of type VOID, which
11391 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11392 remapped into the corresponding attribute of SI. */
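/* For example, with local_vars_size == 5 and a requested alignment of
   32 bits (so byte_alignment == 4), the rounding below moves the offset
   up to 8 before the new local's space is carved out. */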
11394 rtx
11395 bc_allocate_local (size, alignment)
11396 int size, alignment;
11397 {
11398 rtx retval;
11399 int byte_alignment;
11401 if (size < 0)
11402 abort ();
11404 /* Normalize size and alignment */
11405 if (!size)
11406 size = UNITS_PER_WORD;
11408 if (alignment < BITS_PER_UNIT)
11409 byte_alignment = 1 << (INT_ALIGN - 1);
11410 else
11411 /* Align */
11412 byte_alignment = alignment / BITS_PER_UNIT;
11414 if (local_vars_size & (byte_alignment - 1))
11415 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
11417 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11418 local_vars_size += size;
11420 return retval;
11421 }
11424 /* Allocate variable-sized local array. Variable-sized arrays are
11425 actually pointers to the address in memory where they are stored. */
11427 rtx
11428 bc_allocate_variable_array (size)
11429 tree size;
11430 {
11431 rtx retval;
11432 const int ptralign = (1 << (PTR_ALIGN - 1));
11434 /* Align pointer */
11435 if (local_vars_size & ptralign)
11436 local_vars_size += ptralign - (local_vars_size & ptralign);
11438 /* Note down local space needed: pointer to block; also return
11439 dummy rtx */
11441 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11442 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11443 return retval;
11444 }
11447 /* Push the machine address for the given external variable offset. */
11449 void
11450 bc_load_externaddr (externaddr)
11451 rtx externaddr;
11452 {
11453 bc_emit_bytecode (constP);
11454 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11455 BYTECODE_BC_LABEL (externaddr)->offset);
11457 #ifdef DEBUG_PRINT_CODE
11458 fputc ('\n', stderr);
11459 #endif
11460 }
11463 /* Like above, but expects an IDENTIFIER. */
11465 void
11466 bc_load_externaddr_id (id, offset)
11467 tree id;
11468 int offset;
11469 {
11470 if (!IDENTIFIER_POINTER (id))
11471 abort ();
11473 bc_emit_bytecode (constP);
11474 bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11476 #ifdef DEBUG_PRINT_CODE
11477 fputc ('\n', stderr);
11478 #endif
11479 }
11482 /* Push the machine address for the given local variable offset. */
11484 void
11485 bc_load_localaddr (localaddr)
11486 rtx localaddr;
11487 {
11488 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11489 }
11492 /* Push the machine address for the given parameter offset.
11493 NOTE: offset is in bits. */
11495 void
11496 bc_load_parmaddr (parmaddr)
11497 rtx parmaddr;
11498 {
11499 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11500 / BITS_PER_UNIT));
11501 }
11504 /* Convert a[i] into *(a + i). */
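/* More precisely, the tree built below computes the element address as
   <address of the array> + i * sizeof (element), after widening the
   index to pointer precision, and then dereferences it. */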
11506 tree
11507 bc_canonicalize_array_ref (exp)
11508 tree exp;
11509 {
11510 tree type = TREE_TYPE (exp);
11511 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11512 TREE_OPERAND (exp, 0));
11513 tree index = TREE_OPERAND (exp, 1);
11516 /* Convert the integer argument to a type the same size as a pointer
11517 so the multiply won't overflow spuriously. */
11519 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11520 index = convert (type_for_size (POINTER_SIZE, 0), index);
11522 /* The array address isn't volatile even if the array is.
11523 (Of course this isn't terribly relevant since the bytecode
11524 translator treats nearly everything as volatile anyway.) */
11525 TREE_THIS_VOLATILE (array_adr) = 0;
11527 return build1 (INDIRECT_REF, type,
11528 fold (build (PLUS_EXPR,
11529 TYPE_POINTER_TO (type),
11530 array_adr,
11531 fold (build (MULT_EXPR,
11532 TYPE_POINTER_TO (type),
11533 index,
11534 size_in_bytes (type))))));
11535 }
11538 /* Load the address of the component referenced by the given
11539 COMPONENT_REF expression.
11541 Returns innermost lvalue. */
11543 tree
11544 bc_expand_component_address (exp)
11545 tree exp;
11546 {
11547 tree tem, chain;
11548 enum machine_mode mode;
11549 int bitpos = 0;
11550 HOST_WIDE_INT SIval;
11553 tem = TREE_OPERAND (exp, 1);
11554 mode = DECL_MODE (tem);
11557 /* Compute cumulative bit offset for nested component refs
11558 and array refs, and find the ultimate containing object. */
11560 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11561 {
11562 if (TREE_CODE (tem) == COMPONENT_REF)
11563 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
11564 else
11565 if (TREE_CODE (tem) == ARRAY_REF
11566 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11567 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11569 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11570 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11571 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
11572 else
11573 break;
11574 }
11576 bc_expand_expr (tem);
11579 /* For bitfields also push their offset and size */
11580 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11581 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
11582 else
11583 if ((SIval = bitpos / BITS_PER_UNIT))
11584 bc_emit_instruction (addconstPSI, SIval);
11586 return (TREE_OPERAND (exp, 1));
11587 }
11590 /* Emit code to push two SI constants */
11592 void
11593 bc_push_offset_and_size (offset, size)
11594 HOST_WIDE_INT offset, size;
11595 {
11596 bc_emit_instruction (constSI, offset);
11597 bc_emit_instruction (constSI, size);
11598 }
11601 /* Emit byte code to push the address of the given lvalue expression to
11602 the stack. If it's a bit field, we also push offset and size info.
11604 Returns innermost component, which allows us to determine not only
11605 its type, but also whether it's a bitfield. */
11607 tree
11608 bc_expand_address (exp)
11609 tree exp;
11610 {
11611 /* Safeguard */
11612 if (!exp || TREE_CODE (exp) == ERROR_MARK)
11613 return (exp);
11616 switch (TREE_CODE (exp))
11617 {
11618 case ARRAY_REF:
11620 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11622 case COMPONENT_REF:
11624 return (bc_expand_component_address (exp));
11626 case INDIRECT_REF:
11628 bc_expand_expr (TREE_OPERAND (exp, 0));
11630 /* For variable-sized types: retrieve pointer. Sometimes the
11631 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
11632 also make sure we have an operand, just in case... */
11634 if (TREE_OPERAND (exp, 0)
11635 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11636 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11637 bc_emit_instruction (loadP);
11639 /* If packed, also return offset and size */
11640 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11642 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11643 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11645 return (TREE_OPERAND (exp, 0));
11647 case FUNCTION_DECL:
11649 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11650 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
11651 break;
11653 case PARM_DECL:
11655 bc_load_parmaddr (DECL_RTL (exp));
11657 /* For variable-sized types: retrieve pointer */
11658 if (TYPE_SIZE (TREE_TYPE (exp))
11659 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11660 bc_emit_instruction (loadP);
11662 /* If packed, also return offset and size */
11663 if (DECL_BIT_FIELD (exp))
11664 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11665 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11667 break;
11669 case RESULT_DECL:
11671 bc_emit_instruction (returnP);
11672 break;
11674 case VAR_DECL:
11676 #if 0
11677 if (BYTECODE_LABEL (DECL_RTL (exp)))
11678 bc_load_externaddr (DECL_RTL (exp));
11679 #endif
11681 if (DECL_EXTERNAL (exp))
11682 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11683 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11684 else
11685 bc_load_localaddr (DECL_RTL (exp));
11687 /* For variable-sized types: retrieve pointer */
11688 if (TYPE_SIZE (TREE_TYPE (exp))
11689 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11690 bc_emit_instruction (loadP);
11692 /* If packed, also return offset and size */
11693 if (DECL_BIT_FIELD (exp))
11694 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11695 TREE_INT_CST_LOW (DECL_SIZE (exp)));
11697 break;
11699 case STRING_CST:
11700 {
11701 rtx r;
11703 bc_emit_bytecode (constP);
11704 r = output_constant_def (exp);
11705 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11707 #ifdef DEBUG_PRINT_CODE
11708 fputc ('\n', stderr);
11709 #endif
11710 }
11711 break;
11713 default:
11715 abort ();
11716 break;
11717 }
11719 /* Most lvalues don't have components. */
11720 return (exp);
11721 }
11724 /* Emit a type code to be used by the runtime support in handling
11725 parameter passing. The type code consists of the machine mode
11726 plus the minimal alignment shifted left 8 bits. */
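/* For example, an SImode parameter with 32-bit alignment is encoded as
   (int) SImode | (32 << 8): the machine mode sits in the low byte and
   the alignment, in bits, above it. */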
11728 tree
11729 bc_runtime_type_code (type)
11730 tree type;
11731 {
11732 int val;
11734 switch (TREE_CODE (type))
11735 {
11736 case VOID_TYPE:
11737 case INTEGER_TYPE:
11738 case REAL_TYPE:
11739 case COMPLEX_TYPE:
11740 case ENUMERAL_TYPE:
11741 case POINTER_TYPE:
11742 case RECORD_TYPE:
11744 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
11745 break;
11747 case ERROR_MARK:
11749 val = 0;
11750 break;
11752 default:
11754 abort ();
11755 }
11756 return build_int_2 (val, 0);
11757 }
11760 /* Generate constructor label */
11762 char *
11763 bc_gen_constr_label ()
11764 {
11765 static int label_counter;
11766 static char label[20];
11768 sprintf (label, "*LR%d", label_counter++);
11770 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11771 }
11774 /* Evaluate constructor CONSTR and return pointer to it on level one. We
11775 expand the constructor data as static data, and push a pointer to it.
11776 The pointer is put in the pointer table and is retrieved by a constP
11777 bytecode instruction. We then loop and store each constructor member in
11778 the corresponding component. Finally, we return the original pointer on
11779 the stack. */
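/* For a non-constant initializer, say "struct S s = { f (), g () };"
   (f and g hypothetical), this means: lay out a block for the data,
   push a pointer to it, store each member through that pointer, and
   leave the pointer on the stack as the constructor's value. */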
11781 void
11782 bc_expand_constructor (constr)
11783 tree constr;
11784 {
11785 char *l;
11786 HOST_WIDE_INT ptroffs;
11787 rtx constr_rtx;
11790 /* Literal constructors are handled as constants, whereas
11791 non-literals are evaluated and stored element by element
11792 into the data segment. */
11794 /* Allocate space in proper segment and push pointer to space on stack. */
11797 l = bc_gen_constr_label ();
11799 if (TREE_CONSTANT (constr))
11800 {
11801 text_section ();
11803 bc_emit_const_labeldef (l);
11804 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
11805 }
11806 else
11807 {
11808 data_section ();
11810 bc_emit_data_labeldef (l);
11811 bc_output_data_constructor (constr);
11812 }
11815 /* Add reference to pointer table and recall pointer to stack;
11816 this code is common for both types of constructors: literals
11817 and non-literals. */
11819 ptroffs = bc_define_pointer (l);
11820 bc_emit_instruction (constP, ptroffs);
11822 /* This is all that has to be done if it's a literal. */
11823 if (TREE_CONSTANT (constr))
11824 return;
11827 /* At this point, we have the pointer to the structure on top of the stack.
11828 Generate sequences of store_memory calls for the constructor. */
11830 /* constructor type is structure */
11831 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11832 {
11833 register tree elt;
11835 /* If the constructor has fewer fields than the structure,
11836 clear the whole structure first. */
11838 if (list_length (CONSTRUCTOR_ELTS (constr))
11839 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11840 {
11841 bc_emit_instruction (duplicate);
11842 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11843 bc_emit_instruction (clearBLK);
11844 }
11846 /* Store each element of the constructor into the corresponding
11847 field of TARGET. */
11849 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11850 {
11851 register tree field = TREE_PURPOSE (elt);
11852 register enum machine_mode mode;
11853 int bitsize;
11854 int bitpos;
11855 int unsignedp;
11857 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11858 mode = DECL_MODE (field);
11859 unsignedp = TREE_UNSIGNED (field);
11861 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11863 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11864 /* The alignment of TARGET is
11865 at least what its type requires. */
11866 VOIDmode, 0,
11867 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11868 int_size_in_bytes (TREE_TYPE (constr)));
11869 }
11870 }
11871 else
11873 /* Constructor type is array */
11874 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11875 {
11876 register tree elt;
11877 register int i;
11878 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11879 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11880 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11881 tree elttype = TREE_TYPE (TREE_TYPE (constr));
11883 /* If the constructor has fewer elements than the array,
11884 clear the whole array first. */
11886 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11887 {
11888 bc_emit_instruction (duplicate);
11889 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11890 bc_emit_instruction (clearBLK);
11891 }
11894 /* Store each element of the constructor into the corresponding
11895 element of TARGET, determined by counting the elements. */
11897 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11898 elt;
11899 elt = TREE_CHAIN (elt), i++)
11900 {
11901 register enum machine_mode mode;
11902 int bitsize;
11903 int bitpos;
11904 int unsignedp;
11906 mode = TYPE_MODE (elttype);
11907 bitsize = GET_MODE_BITSIZE (mode);
11908 unsignedp = TREE_UNSIGNED (elttype);
11910 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11911 /* * TYPE_SIZE_UNIT (elttype) */ );
11913 bc_store_field (elt, bitsize, bitpos, mode,
11914 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11915 /* The alignment of TARGET is
11916 at least what its type requires. */
11917 VOIDmode, 0,
11918 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11919 int_size_in_bytes (TREE_TYPE (constr)));
11920 }
11921 }
11922 }
11926 /* Store the value of EXP (an expression tree) into member FIELD of
11927 structure at address on stack, which has type TYPE, mode MODE and
11928 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11929 structure.
11931 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11932 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11934 void
11935 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11936 value_mode, unsignedp, align, total_size)
11937 int bitsize, bitpos;
11938 enum machine_mode mode;
11939 tree field, exp, type;
11940 enum machine_mode value_mode;
11941 int unsignedp;
11942 int align;
11943 int total_size;
11944 {
11946 /* Expand expression and copy pointer */
11947 bc_expand_expr (exp);
11948 bc_emit_instruction (over);
11951 /* If the component is a bit field, we cannot use addressing to access
11952 it. Use bit-field techniques to store in it. */
11954 if (DECL_BIT_FIELD (field))
11955 {
11956 bc_store_bit_field (bitpos, bitsize, unsignedp);
11957 return;
11958 }
11959 else
11960 /* Not bit field */
11961 {
11962 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11964 /* Advance pointer to the desired member */
11965 if (offset)
11966 bc_emit_instruction (addconstPSI, offset);
11968 /* Store */
11969 bc_store_memory (type, field);
11970 }
11971 }
11974 /* Store SI/SU in bitfield */
11976 void
11977 bc_store_bit_field (offset, size, unsignedp)
11978 int offset, size, unsignedp;
11979 {
11980 /* Push bitfield offset and size */
11981 bc_push_offset_and_size (offset, size);
11983 /* Store */
11984 bc_emit_instruction (sstoreBI);
11985 }
11988 /* Load SI/SU from bitfield */
11990 void
11991 bc_load_bit_field (offset, size, unsignedp)
11992 int offset, size, unsignedp;
11993 {
11994 /* Push bitfield offset and size */
11995 bc_push_offset_and_size (offset, size);
11997 /* Load: sign-extend if signed, else zero-extend */
11998 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11999 }
12002 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
12003 (adjust stack pointer upwards), negative means add that number of
12004 levels (adjust the stack pointer downwards). Only positive values
12005 normally make sense. */
12007 void
12008 bc_adjust_stack (nlevels)
12009 int nlevels;
12010 {
12011 switch (nlevels)
12012 {
12013 case 0:
12014 break;
12016 case 2:
12017 bc_emit_instruction (drop);
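/* Falls through: dropping two levels is done as two single drops. */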
12019 case 1:
12020 bc_emit_instruction (drop);
12021 break;
12023 default:
12025 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
12026 stack_depth -= nlevels;
12027 }
12029 #if defined (VALIDATE_STACK_FOR_BC)
12030 VALIDATE_STACK_FOR_BC ();
12031 #endif
12032 }