/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "gvarargs.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
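/* For example, CEIL (10, 4) is (10 + 3) / 4 == 3: the number of
   4-byte units needed to cover 10 bytes, rounding up rather than
   truncating.  */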
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#ifdef PUSH_ROUNDING
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
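/* For instance, a machine with a STACK_BOUNDARY of 64 bits (and 8-bit
   units) has a STACK_BYTES of 8, so emit_library_call below rounds
   argument block sizes up to a multiple of 8 bytes.  */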
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;
rtx store_expr ();
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
static enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }

      movstr_optab[(int) mode] = CODE_FOR_nothing;
    }

  end_sequence ();

#ifdef HAVE_movstrqi
  if (HAVE_movstrqi)
    movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
#endif
#ifdef HAVE_movstrhi
  if (HAVE_movstrhi)
    movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
#endif
#ifdef HAVE_movstrsi
  if (HAVE_movstrsi)
    movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
#endif
#ifdef HAVE_movstrdi
  if (HAVE_movstrdi)
    movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
#endif
#ifdef HAVE_movstrti
  if (HAVE_movstrti)
    movstr_optab[(int) TImode] = CODE_FOR_movstrti;
#endif
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
	 to facilitate use of autoincrement.
	 Make a copy of the contents of the memory location
	 rather than a copy of the address, but not
	 if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  XEXP (x, 0) = QUEUED_VAR (y);
	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (x));
	      emit_insn_before (gen_move_insn (temp, x),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return x;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
	{
	  XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
	  XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
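/* For example (an illustrative sketch, not part of the original file;
   TARGET and OP are hypothetical), a caller protects each operand
   immediately before emitting it, then flushes the queue once the
   whole expression has been expanded:

     op = protect_from_queue (op, 0);
     emit_insn (gen_move_insn (target, op));
     emit_queue ();

   If OP was a QUEUED whose increment has already been emitted, the
   first call returns a copy holding the pre-increment value.  */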
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
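/* The queue's life cycle, in outline (an illustrative sketch, not part
   of the original file): expanding a postincrement such as `x++' calls
   enqueue_insn to record the add and hands back a QUEUED rtx standing
   for the old value; protect_from_queue later turns that QUEUED into
   something an insn can use; emit_queue finally emits the recorded
   adds.  E.g. for a hypothetical pseudo REG:

     rtx q = enqueue_insn (reg, gen_add2_insn (reg, GEN_INT (1)));
     ...
     emit_queue ();

   gen_add2_insn and GEN_INT are used exactly as elsewhere in this
   file.  */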
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (to_real)
    {
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, from));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi
	  if (HAVE_extendpsisi)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
	   && ! MEM_VOLATILE_P (from)
	   && direct_load[(int) to_mode]
	   && ! mode_dependent_address_p (XEXP (from, 0)))
	  || GET_CODE (from) == REG
	  || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension to a wider integer mode.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
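/* For example (an illustrative sketch, not part of the original file;
   WIDE and NARROW are hypothetical pseudos), sign-extending an SImode
   value into a DImode register reduces to one call:

     rtx wide = gen_reg_rtx (DImode);
     rtx narrow = gen_reg_rtx (SImode);
     convert_move (wide, narrow, 0);

   With UNSIGNEDP nonzero the same call zero-extends instead.  */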
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
		      && direct_load[(int) mode]
		      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
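/* For example (an illustrative sketch, not part of the original file;
   QI_VAL is a hypothetical QImode pseudo), zero-extending a byte value
   for use as an SImode operand:

     rtx wide = convert_to_mode (SImode, qi_val, 1);

   Since SImode is wider, a fresh pseudo is created and convert_move
   emits the extension; narrowing conversions of registers instead come
   back as a simple low-part reference.  */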
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};
static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
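/* Worked example (illustrative, not part of the original file): with
   MOVE_MAX == 4, full alignment, and all integer move patterns
   available, move_by_pieces_ninsns (7, 4) counts one SImode move
   (7 / 4, leaving 3 bytes), one HImode move (leaving 1 byte), and one
   QImode move: 3 insns in all.  emit_block_move compares this count
   against MOVE_RATIO to decide between copying by pieces and a block
   move pattern or library call.  */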
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (Pmode, size, 1), Pmode);
#endif
    }
}
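/* For example (an illustrative sketch, not part of the original file;
   X and Y are hypothetical BLKmode MEMs already passed through
   protect_from_queue by a caller such as expand_assignment):

     emit_block_move (x, y, GEN_INT (12), 4);

   With a large enough MOVE_RATIO this becomes three SImode moves via
   move_by_pieces; otherwise a movstr pattern or a memcpy/bcopy library
   call is used.  */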
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}
/* Mark the instructions since PREV as a libcall block.
   Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn.
   (Declared void: the function returns no value.)  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark.  */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
				   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
				    REG_NOTES (insn_first));
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
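/* For example (an illustrative sketch, not part of the original file;
   OBJ is a hypothetical BLKmode MEM for an aggregate), zeroing a
   16-byte structure would be written:

     clear_storage (obj, 16);

   which expands to a memset (or bzero) library call; a non-BLKmode
   OBJ is simply assigned const0_rtx.  */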
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
			     (class == MODE_COMPLEX_INT
			      ? MODE_INT : MODE_FLOAT),
			     0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && submode != BLKmode
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_highpart (submode, x)),
		  gen_highpart (submode, y)));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_lowpart (submode, x)),
		  gen_lowpart (submode, y)));

      group_insns (prev);

      /* Return the last of the part moves, as documented above.  */
      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}
      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
  else
    abort ();
}
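/* Worked example (illustrative, not part of the original file): moving
   an SCmode value (complex single float) on a machine with no SCmode
   move pattern reaches the complex branch above: SUBMODE is SFmode and
   the copy becomes two SFmode moves, one per half, grouped by
   group_insns into a single libcall block so later passes treat them
   as a unit.  */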
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

static rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
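/* For example (illustrative, not part of the original file): with a
   downward-growing stack, STACK_PUSH_CODE is PRE_DEC, so
   gen_push_operand returns (pre_dec (reg sp)); a MEM of mode M wrapped
   around it is a destination whose store first decrements the stack
   pointer by the size of M, i.e. a push insn operand.  */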
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);
  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr, args_so_far),
						  skip));

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	  if (HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	    {
	      emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				       xinner, size, GEN_INT (align)));
	      goto ret;
	    }
#endif
#ifdef HAVE_movstrhi
	  if (HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	    {
	      emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				       xinner, size, GEN_INT (align)));
	      goto ret;
	    }
#endif
#ifdef HAVE_movstrsi
	  if (HAVE_movstrsi)
	    {
	      emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				       xinner, size, GEN_INT (align)));
	      goto ret;
	    }
#endif
#ifdef HAVE_movstrdi
	  if (HAVE_movstrdi)
	    {
	      emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				       xinner, size, GEN_INT (align)));
	      goto ret;
	    }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     size, Pmode);
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     size, Pmode);
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	if (GET_CODE (args_so_far) == CONST_INT)
	  addr
	    = memory_address (mode,
			      plus_constant (args_addr, INTVAL (args_so_far)));
	else
	  addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
						args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
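/* For example (an illustrative sketch, not part of the original file;
   VAL, ARGS_ADDR and ARGS_SO_FAR are hypothetical), pushing a DImode
   scalar entirely on the stack mirrors the recursive call above:

     emit_push_insn (val, DImode, NULL_TREE, NULL_RTX,
		     align, 0, NULL_RTX, 0, args_addr, args_so_far);

   SIZE is needed only for BLKmode, PARTIAL is 0 (no words go in
   registers), and REG is therefore unused.  */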
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   NO_QUEUE will be true if and only if the library call is a `const' call
   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
   to the variable is_const in expand_call.

   NO_QUEUE must be true for const calls, because if it isn't, then
   any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
   and will be lost if the libcall sequence is optimized away.

   NO_QUEUE must be false for non-const calls, because if it isn't, the
   call insn will have its CONST_CALL_P bit set, and it will be incorrectly
   optimized.  For instance, the instruction scheduler may incorrectly
   move memory references across the non-const call.  */
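/* For example, the alternating value/mode convention looks like the
   memcpy call emitted by emit_block_move earlier in this file:

     emit_library_call (memcpy_libfunc, 0, VOIDmode, 3,
			XEXP (x, 0), Pmode,
			XEXP (y, 0), Pmode,
			convert_to_mode (Pmode, size, 1), Pmode);

   i.e. FUN, NO_QUEUE, OUTMODE, NARGS, then NARGS (value, mode)
   pairs.  */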
1926 void
1927 emit_library_call (va_alist)
1928 va_dcl
1930 va_list p;
1931 struct args_size args_size;
1932 register int argnum;
1933 enum machine_mode outmode;
1934 int nargs;
1935 rtx fun;
1936 rtx orgfun;
1937 int inc;
1938 int count;
1939 rtx argblock = 0;
1940 CUMULATIVE_ARGS args_so_far;
1941 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
1942 struct args_size offset; struct args_size size; };
1943 struct arg *argvec;
1944 int old_inhibit_defer_pop = inhibit_defer_pop;
1945 int no_queue = 0;
1946 rtx use_insns;
1948 va_start (p);
1949 orgfun = fun = va_arg (p, rtx);
1950 no_queue = va_arg (p, int);
1951 outmode = va_arg (p, enum machine_mode);
1952 nargs = va_arg (p, int);
1954 /* Copy all the libcall-arguments out of the varargs data
1955 and into a vector ARGVEC.
1957 Compute how to pass each argument. We only support a very small subset
1958 of the full argument passing conventions to limit complexity here since
1959 library functions shouldn't have many args. */
1961 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
1963 INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);
1965 args_size.constant = 0;
1966 args_size.var = 0;
1968 for (count = 0; count < nargs; count++)
1970 rtx val = va_arg (p, rtx);
1971 enum machine_mode mode = va_arg (p, enum machine_mode);
1973 /* We cannot convert the arg value to the mode the library wants here;
1974 must do it earlier where we know the signedness of the arg. */
1975 if (mode == BLKmode
1976 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
1977 abort ();
1979 /* On some machines, there's no way to pass a float to a library fcn.
1980 Pass it as a double instead. */
1981 #ifdef LIBGCC_NEEDS_DOUBLE
1982 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
1983 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
1984 #endif
1986 /* There's no need to call protect_from_queue, because
1987 either emit_move_insn or emit_push_insn will do that. */
1989 /* Make sure it is a reasonable operand for a move or push insn. */
1990 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
1991 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
1992 val = force_operand (val, NULL_RTX);
1994 argvec[count].value = val;
1995 argvec[count].mode = mode;
1997 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1998 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
1999 abort ();
2000 #endif
2002 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2003 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2004 abort ();
2005 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2006 argvec[count].partial
2007 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2008 #else
2009 argvec[count].partial = 0;
2010 #endif
2012 locate_and_pad_parm (mode, NULL_TREE,
2013 argvec[count].reg && argvec[count].partial == 0,
2014 NULL_TREE, &args_size, &argvec[count].offset,
2015 &argvec[count].size);
2017 if (argvec[count].size.var)
2018 abort ();
2020 #ifndef REG_PARM_STACK_SPACE
2021 if (argvec[count].partial)
2022 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2023 #endif
2025 if (argvec[count].reg == 0 || argvec[count].partial != 0
2026 #ifdef REG_PARM_STACK_SPACE
2027 || 1
2028 #endif
2029 )
2030 args_size.constant += argvec[count].size.constant;
2032 #ifdef ACCUMULATE_OUTGOING_ARGS
2033 /* If this arg is actually passed on the stack, it might be
2034 clobbering something we already put there (this library call might
2035 be inside the evaluation of an argument to a function whose call
2036 requires the stack). This will only occur when the library call
2037 has sufficient args to run out of argument registers. Abort in
2038 this case; if this ever occurs, code must be added to save and
2039 restore the arg slot. */
2041 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2042 abort ();
2043 #endif
2045 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2047 va_end (p);
2049 /* If this machine requires an external definition for library
2050 functions, write one out. */
2051 assemble_external_libcall (fun);
2053 #ifdef STACK_BOUNDARY
2054 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2055 / STACK_BYTES) * STACK_BYTES);
2056 #endif
2058 #ifdef REG_PARM_STACK_SPACE
2059 args_size.constant = MAX (args_size.constant,
2060 REG_PARM_STACK_SPACE ((tree) 0));
2061 #endif
2063 #ifdef ACCUMULATE_OUTGOING_ARGS
2064 if (args_size.constant > current_function_outgoing_args_size)
2065 current_function_outgoing_args_size = args_size.constant;
2066 args_size.constant = 0;
2067 #endif
2069 #ifndef PUSH_ROUNDING
2070 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2071 #endif
2073 #ifdef PUSH_ARGS_REVERSED
2074 inc = -1;
2075 argnum = nargs - 1;
2076 #else
2077 inc = 1;
2078 argnum = 0;
2079 #endif
2081 /* Push the args that need to be pushed. */
2083 for (count = 0; count < nargs; count++, argnum += inc)
2085 register enum machine_mode mode = argvec[argnum].mode;
2086 register rtx val = argvec[argnum].value;
2087 rtx reg = argvec[argnum].reg;
2088 int partial = argvec[argnum].partial;
2090 if (! (reg != 0 && partial == 0))
2091 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2092 argblock, GEN_INT (argvec[argnum].offset.constant));
2093 NO_DEFER_POP;
2096 #ifdef PUSH_ARGS_REVERSED
2097 argnum = nargs - 1;
2098 #else
2099 argnum = 0;
2100 #endif
2102 /* Now load any reg parms into their regs. */
2104 for (count = 0; count < nargs; count++, argnum += inc)
2106 register enum machine_mode mode = argvec[argnum].mode;
2107 register rtx val = argvec[argnum].value;
2108 rtx reg = argvec[argnum].reg;
2109 int partial = argvec[argnum].partial;
2111 if (reg != 0 && partial == 0)
2112 emit_move_insn (reg, val);
2113 NO_DEFER_POP;
2116 /* For version 1.37, try deleting this entirely. */
2117 if (! no_queue)
2118 emit_queue ();
2120 /* Any regs containing parms remain in use through the call. */
2121 start_sequence ();
2122 for (count = 0; count < nargs; count++)
2123 if (argvec[count].reg != 0)
2124 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2126 use_insns = get_insns ();
2127 end_sequence ();
2129 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2131 /* Don't allow popping to be deferred, since then
2132 cse'ing of library calls could delete a call and leave the pop. */
2133 NO_DEFER_POP;
2135 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2136 will set inhibit_defer_pop to that value. */
2138 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2139 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2140 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2141 old_inhibit_defer_pop + 1, use_insns, no_queue);
2143 /* Now restore inhibit_defer_pop to its actual original value. */
2144 OK_DEFER_POP;
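/* Call-site sketch (an illustration, not part of this file): FUN, NO_QUEUE,
   OUTMODE and NARGS are followed by NARGS alternating (value, mode) pairs,
   exactly as in the memcpy_libfunc call made by expand_assignment below.
   DST_ADDR, SRC_ADDR and SIZE_RTX are hypothetical names.  */
#if 0
  emit_library_call (memcpy_libfunc, 0, VOIDmode, 3,
		     dst_addr, Pmode, src_addr, Pmode, size_rtx, Pmode);
#endif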
2147 /* Expand an assignment that stores the value of FROM into TO.
2148 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2149 (This may contain a QUEUED rtx.)
2150 Otherwise, the returned value is not meaningful.
2152 SUGGEST_REG is no longer actually used.
2153 It used to mean, copy the value through a register
2154 and return that register, if that is possible.
2155 But now we do this if WANT_VALUE.
2157 If the value stored is a constant, we return the constant. */
2160 expand_assignment (to, from, want_value, suggest_reg)
2161 tree to, from;
2162 int want_value;
2163 int suggest_reg;
2165 register rtx to_rtx = 0;
2166 rtx result;
2168 /* Don't crash if the lhs of the assignment was erroneous. */
2170 if (TREE_CODE (to) == ERROR_MARK)
2171 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2173 /* Assignment of a structure component needs special treatment
2174 if the structure component's rtx is not simply a MEM.
2175 Assignment of an array element at a constant index
2176 has the same problem. */
2178 if (TREE_CODE (to) == COMPONENT_REF
2179 || TREE_CODE (to) == BIT_FIELD_REF
2180 || (TREE_CODE (to) == ARRAY_REF
2181 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2182 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2184 enum machine_mode mode1;
2185 int bitsize;
2186 int bitpos;
2187 tree offset;
2188 int unsignedp;
2189 int volatilep = 0;
2190 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2191 &mode1, &unsignedp, &volatilep);
2193 /* If we are going to use store_bit_field and extract_bit_field,
2194 make sure to_rtx will be safe for multiple use. */
2196 if (mode1 == VOIDmode && want_value)
2197 tem = stabilize_reference (tem);
2199 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2200 if (offset != 0)
2202 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2204 if (GET_CODE (to_rtx) != MEM)
2205 abort ();
2206 to_rtx = change_address (to_rtx, VOIDmode,
2207 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2208 force_reg (Pmode, offset_rtx)));
2210 if (volatilep)
2212 if (GET_CODE (to_rtx) == MEM)
2213 MEM_VOLATILE_P (to_rtx) = 1;
2214 #if 0 /* This was turned off because, when a field is volatile
2215 in an object which is not volatile, the object may be in a register,
2216 and then we would abort over here. */
2217 else
2218 abort ();
2219 #endif
2222 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2223 (want_value
2224 /* Spurious cast makes HPUX compiler happy. */
2225 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2226 : VOIDmode),
2227 unsignedp,
2228 /* Required alignment of containing datum. */
2229 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2230 int_size_in_bytes (TREE_TYPE (tem)));
2231 preserve_temp_slots (result);
2232 free_temp_slots ();
2234 return result;
2237 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2238 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2240 if (to_rtx == 0)
2241 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2243 /* In case we are returning the contents of an object which overlaps
2244 the place the value is being stored, use a safe function when copying
2245 a value through a pointer into a structure value return block. */
2246 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2247 && current_function_returns_struct
2248 && !current_function_returns_pcc_struct)
2250 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2251 rtx size = expr_size (from);
2253 #ifdef TARGET_MEM_FUNCTIONS
2254 emit_library_call (memcpy_libfunc, 0,
2255 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2256 XEXP (from_rtx, 0), Pmode,
2257 size, Pmode);
2258 #else
2259 emit_library_call (bcopy_libfunc, 0,
2260 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2261 XEXP (to_rtx, 0), Pmode,
2262 size, Pmode);
2263 #endif
2265 preserve_temp_slots (to_rtx);
2266 free_temp_slots ();
2267 return to_rtx;
2270 /* Compute FROM and store the value in the rtx we got. */
2272 result = store_expr (from, to_rtx, want_value);
2273 preserve_temp_slots (result);
2274 free_temp_slots ();
2275 return result;
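/* Source-level illustration (hypothetical user code, not compiler code):
   an assignment like the one below takes the COMPONENT_REF branch above,
   where get_inner_reference splits the lhs into containing object, bit
   position and bit size before store_field performs the store.  */
#if 0
struct example_s { unsigned int f : 3; };

static void
example_assign (p, x)
     struct example_s *p;
     int x;
{
  p->f = x;		/* expanded via get_inner_reference + store_field */
}
#endif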
2278 /* Generate code for computing expression EXP,
2279 and storing the value into TARGET.
2280 Returns TARGET or an equivalent value.
2281 TARGET may contain a QUEUED rtx.
2283 If SUGGEST_REG is nonzero, copy the value through a register
2284 and return that register, if that is possible.
2286 If the value stored is a constant, we return the constant. */
2289 store_expr (exp, target, suggest_reg)
2290 register tree exp;
2291 register rtx target;
2292 int suggest_reg;
2294 register rtx temp;
2295 int dont_return_target = 0;
2297 if (TREE_CODE (exp) == COMPOUND_EXPR)
2299 /* Perform first part of compound expression, then assign from second
2300 part. */
2301 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2302 emit_queue ();
2303 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2305 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2307 /* For conditional expression, get safe form of the target. Then
2308 test the condition, doing the appropriate assignment on either
2309 side. This avoids the creation of unnecessary temporaries.
2310 For non-BLKmode, it is more efficient not to do this. */
2312 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2314 emit_queue ();
2315 target = protect_from_queue (target, 1);
2317 NO_DEFER_POP;
2318 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2319 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2320 emit_queue ();
2321 emit_jump_insn (gen_jump (lab2));
2322 emit_barrier ();
2323 emit_label (lab1);
2324 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2325 emit_queue ();
2326 emit_label (lab2);
2327 OK_DEFER_POP;
2328 return target;
2330 else if (suggest_reg && GET_CODE (target) == MEM
2331 && GET_MODE (target) != BLKmode)
2332 /* If target is in memory and caller wants value in a register instead,
2333 arrange that. Pass TARGET as target for expand_expr so that,
2334 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2335 We know expand_expr will not use the target in that case. */
2337 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2338 GET_MODE (target), 0);
2339 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2340 temp = copy_to_reg (temp);
2341 dont_return_target = 1;
2343 else if (queued_subexp_p (target))
2344 /* If target contains a postincrement, it is not safe
2345 to use as the returned value. It would access the wrong
2346 place by the time the queued increment gets output.
2347 So copy the value through a temporary and use that temp
2348 as the result. */
2350 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2352 /* Expand EXP into a new pseudo. */
2353 temp = gen_reg_rtx (GET_MODE (target));
2354 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2356 else
2357 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2358 dont_return_target = 1;
2360 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2361 /* If this is a scalar in a register that is stored in a wider mode
2362 than the declared mode, compute the result into its declared mode
2363 and then convert to the wider mode. Our value is the computed
2364 expression. */
2366 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2367 convert_move (SUBREG_REG (target), temp,
2368 SUBREG_PROMOTED_UNSIGNED_P (target));
2369 return temp;
2371 else
2373 temp = expand_expr (exp, target, GET_MODE (target), 0);
2374 /* DO return TARGET if it's a specified hardware register.
2375 expand_return relies on this. */
2376 if (!(target && GET_CODE (target) == REG
2377 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2378 && CONSTANT_P (temp))
2379 dont_return_target = 1;
2382 /* If value was not generated in the target, store it there.
2383 Convert the value to TARGET's type first if necessary. */
2385 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2387 target = protect_from_queue (target, 1);
2388 if (GET_MODE (temp) != GET_MODE (target)
2389 && GET_MODE (temp) != VOIDmode)
2391 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2392 if (dont_return_target)
2394 /* In this case, we will return TEMP,
2395 so make sure it has the proper mode.
2396 But don't forget to store the value into TARGET. */
2397 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2398 emit_move_insn (target, temp);
2400 else
2401 convert_move (target, temp, unsignedp);
2404 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2406 /* Handle copying a string constant into an array.
2407 The string constant may be shorter than the array.
2408 So copy just the string's actual length, and clear the rest. */
2409 rtx size;
2411 /* Get the size of the data type of the string,
2412 which is actually the size of the target. */
2413 size = expr_size (exp);
2414 if (GET_CODE (size) == CONST_INT
2415 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2416 emit_block_move (target, temp, size,
2417 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2418 else
2420 /* Compute the size of the data to copy from the string. */
2421 tree copy_size
2422 = fold (build (MIN_EXPR, sizetype,
2423 size_binop (CEIL_DIV_EXPR,
2424 TYPE_SIZE (TREE_TYPE (exp)),
2425 size_int (BITS_PER_UNIT)),
2426 convert (sizetype,
2427 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2428 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2429 VOIDmode, 0);
2430 rtx label = 0;
2432 /* Copy that much. */
2433 emit_block_move (target, temp, copy_size_rtx,
2434 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2436 /* Figure out how much is left in TARGET
2437 that we have to clear. */
2438 if (GET_CODE (copy_size_rtx) == CONST_INT)
2440 temp = plus_constant (XEXP (target, 0),
2441 TREE_STRING_LENGTH (exp));
2442 size = plus_constant (size,
2443 - TREE_STRING_LENGTH (exp));
2445 else
2447 enum machine_mode size_mode = Pmode;
2449 temp = force_reg (Pmode, XEXP (target, 0));
2450 temp = expand_binop (size_mode, add_optab, temp,
2451 copy_size_rtx, NULL_RTX, 0,
2452 OPTAB_LIB_WIDEN);
2454 size = expand_binop (size_mode, sub_optab, size,
2455 copy_size_rtx, NULL_RTX, 0,
2456 OPTAB_LIB_WIDEN);
2458 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2459 GET_MODE (size), 0, 0);
2460 label = gen_label_rtx ();
2461 emit_jump_insn (gen_blt (label));
2464 if (size != const0_rtx)
2466 #ifdef TARGET_MEM_FUNCTIONS
2467 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2468 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2469 #else
2470 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2471 temp, Pmode, size, Pmode);
2472 #endif
2474 if (label)
2475 emit_label (label);
2478 else if (GET_MODE (temp) == BLKmode)
2479 emit_block_move (target, temp, expr_size (exp),
2480 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2481 else
2482 emit_move_insn (target, temp);
2484 if (dont_return_target)
2485 return temp;
2486 return target;
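/* Source-level illustration (hypothetical) of the STRING_CST case above:
   the constant's TREE_STRING_LENGTH is 4 (including the terminating null),
   so 4 bytes are block-copied and the remaining 12 are cleared.  */
#if 0
static void
example_string ()
{
  char buf[16] = "abc";		/* copy 4 bytes, clear the other 12 */
}
#endif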
2489 /* Store the value of constructor EXP into the rtx TARGET.
2490 TARGET is either a REG or a MEM. */
2492 static void
2493 store_constructor (exp, target)
2494 tree exp;
2495 rtx target;
2497 tree type = TREE_TYPE (exp);
2499 /* We know our target cannot conflict, since safe_from_p has been called. */
2500 #if 0
2501 /* Don't try copying piece by piece into a hard register
2502 since that is vulnerable to being clobbered by EXP.
2503 Instead, construct in a pseudo register and then copy it all. */
2504 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2506 rtx temp = gen_reg_rtx (GET_MODE (target));
2507 store_constructor (exp, temp);
2508 emit_move_insn (target, temp);
2509 return;
2511 #endif
2513 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2515 register tree elt;
2517 /* Inform later passes that the whole union value is dead. */
2518 if (TREE_CODE (type) == UNION_TYPE)
2519 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2521 /* If we are building a static constructor into a register,
2522 set the initial value as zero so we can fold the value into
2523 a constant. */
2524 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2525 emit_move_insn (target, const0_rtx);
2527 /* If the constructor has fewer fields than the structure,
2528 clear the whole structure first. */
2529 else if (list_length (CONSTRUCTOR_ELTS (exp))
2530 != list_length (TYPE_FIELDS (type)))
2531 clear_storage (target, int_size_in_bytes (type));
2532 else
2533 /* Inform later passes that the old value is dead. */
2534 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2536 /* Store each element of the constructor into
2537 the corresponding field of TARGET. */
2539 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2541 register tree field = TREE_PURPOSE (elt);
2542 register enum machine_mode mode;
2543 int bitsize;
2544 int bitpos;
2545 int unsignedp;
2547 /* Just ignore missing fields.
2548 We cleared the whole structure, above,
2549 if any fields are missing. */
2550 if (field == 0)
2551 continue;
2553 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2554 unsignedp = TREE_UNSIGNED (field);
2555 mode = DECL_MODE (field);
2556 if (DECL_BIT_FIELD (field))
2557 mode = VOIDmode;
2559 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2560 /* ??? This case remains to be written. */
2561 abort ();
2563 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2565 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2566 /* The alignment of TARGET is
2567 at least what its type requires. */
2568 VOIDmode, 0,
2569 TYPE_ALIGN (type) / BITS_PER_UNIT,
2570 int_size_in_bytes (type));
2573 else if (TREE_CODE (type) == ARRAY_TYPE)
2575 register tree elt;
2576 register int i;
2577 tree domain = TYPE_DOMAIN (type);
2578 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2579 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2580 tree elttype = TREE_TYPE (type);
2582 /* If the constructor has fewer fields than the structure,
2583 clear the whole structure first. Similarly if this is a
2584 static constructor of a non-BLKmode object. */
2586 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2587 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2588 clear_storage (target, maxelt - minelt + 1);
2589 else
2590 /* Inform later passes that the old value is dead. */
2591 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2593 /* Store each element of the constructor into
2594 the corresponding element of TARGET, determined
2595 by counting the elements. */
2596 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2597 elt;
2598 elt = TREE_CHAIN (elt), i++)
2600 register enum machine_mode mode;
2601 int bitsize;
2602 int bitpos;
2603 int unsignedp;
2605 mode = TYPE_MODE (elttype);
2606 bitsize = GET_MODE_BITSIZE (mode);
2607 unsignedp = TREE_UNSIGNED (elttype);
2609 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2611 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2612 /* The alignment of TARGET is
2613 at least what its type requires. */
2614 VOIDmode, 0,
2615 TYPE_ALIGN (type) / BITS_PER_UNIT,
2616 int_size_in_bytes (type));
2620 else
2621 abort ();
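/* Hypothetical source fragments reaching the two branches of
   store_constructor above.  */
#if 0
struct pt { int x, y; };

static void
example_ctor ()
{
  struct pt p = { 1 };		/* fewer inits than fields: cleared first */
  int v[2] = { 1, 2 };		/* element-by-element store_field calls */
}
#endif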
2624 /* Store the value of EXP (an expression tree)
2625 into a subfield of TARGET which has mode MODE and occupies
2626 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2627 If MODE is VOIDmode, it means that we are storing into a bit-field.
2629 If VALUE_MODE is VOIDmode, return nothing in particular.
2630 UNSIGNEDP is not used in this case.
2632 Otherwise, return an rtx for the value stored. This rtx
2633 has mode VALUE_MODE if that is convenient to do.
2634 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2636 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2637 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2639 static rtx
2640 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2641 unsignedp, align, total_size)
2642 rtx target;
2643 int bitsize, bitpos;
2644 enum machine_mode mode;
2645 tree exp;
2646 enum machine_mode value_mode;
2647 int unsignedp;
2648 int align;
2649 int total_size;
2651 HOST_WIDE_INT width_mask = 0;
2653 if (bitsize < HOST_BITS_PER_WIDE_INT)
2654 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2656 /* If we are storing into an unaligned field of an aligned union that is
2657 in a register, we may have the mode of TARGET being an integer mode but
2658 MODE == BLKmode. In that case, get an aligned object whose size and
2659 alignment are the same as TARGET and store TARGET into it (we can avoid
2660 the store if the field being stored is the entire width of TARGET). Then
2661 call ourselves recursively to store the field into a BLKmode version of
2662 that object. Finally, load from the object into TARGET. This is not
2663 very efficient in general, but should only be slightly more expensive
2664 than the otherwise-required unaligned accesses. Perhaps this can be
2665 cleaned up later. */
2667 if (mode == BLKmode
2668 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2670 rtx object = assign_stack_temp (GET_MODE (target),
2671 GET_MODE_SIZE (GET_MODE (target)), 0);
2672 rtx blk_object = copy_rtx (object);
2674 PUT_MODE (blk_object, BLKmode);
2676 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2677 emit_move_insn (object, target);
2679 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2680 align, total_size);
2682 emit_move_insn (target, object);
2684 return target;
2687 /* If the structure is in a register or if the component
2688 is a bit field, we cannot use addressing to access it.
2689 Use bit-field techniques or SUBREG to store in it. */
2691 if (mode == VOIDmode
2692 || (mode != BLKmode && ! direct_store[(int) mode])
2693 || GET_CODE (target) == REG
2694 || GET_CODE (target) == SUBREG)
2696 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2697 /* Store the value in the bitfield. */
2698 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2699 if (value_mode != VOIDmode)
2701 /* The caller wants an rtx for the value. */
2702 /* If possible, avoid refetching from the bitfield itself. */
2703 if (width_mask != 0
2704 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2705 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2706 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2707 NULL_RTX, value_mode, 0, align,
2708 total_size);
2710 return const0_rtx;
2712 else
2714 rtx addr = XEXP (target, 0);
2715 rtx to_rtx;
2717 /* If a value is wanted, it must be the lhs;
2718 so make the address stable for multiple use. */
2720 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2721 && ! CONSTANT_ADDRESS_P (addr)
2722 /* A frame-pointer reference is already stable. */
2723 && ! (GET_CODE (addr) == PLUS
2724 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2725 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2726 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2727 addr = copy_to_reg (addr);
2729 /* Now build a reference to just the desired component. */
2731 to_rtx = change_address (target, mode,
2732 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2733 MEM_IN_STRUCT_P (to_rtx) = 1;
2735 return store_expr (exp, to_rtx, value_mode != VOIDmode);
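/* A minimal sketch of the refetch avoidance above, assuming BITSIZE is 3:
   WIDTH_MASK is (1 << 3) - 1 == 7, so the value just stored is masked
   instead of being extracted back out of the (possibly volatile or slow)
   bit-field.  */
#if 0
  width_mask = ((HOST_WIDE_INT) 1 << 3) - 1;	/* == 7 */
  return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
#endif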
2739 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2740 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2741 ARRAY_REFs at constant positions and find the ultimate containing object,
2742 which we return.
2744 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2745 bit position, and *PUNSIGNEDP to the signedness of the field.
2746 If the position of the field is variable, we store a tree
2747 giving the variable offset (in units) in *POFFSET.
2748 This offset is in addition to the bit position.
2749 If the position is not variable, we store 0 in *POFFSET.
2751 If any of the extraction expressions is volatile,
2752 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2754 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2755 is a mode that can be used to access the field. In that case, *PBITSIZE
2756 is redundant.
2758 If the field describes a variable-sized object, *PMODE is set to
2759 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2760 this case, but the address of the object can be found. */
2762 tree
2763 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2764 tree exp;
2765 int *pbitsize;
2766 int *pbitpos;
2767 tree *poffset;
2768 enum machine_mode *pmode;
2769 int *punsignedp;
2770 int *pvolatilep;
2772 tree size_tree = 0;
2773 enum machine_mode mode = VOIDmode;
2774 tree offset = 0;
2776 if (TREE_CODE (exp) == COMPONENT_REF)
2778 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2779 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2780 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2781 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2783 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2785 size_tree = TREE_OPERAND (exp, 1);
2786 *punsignedp = TREE_UNSIGNED (exp);
2788 else
2790 mode = TYPE_MODE (TREE_TYPE (exp));
2791 *pbitsize = GET_MODE_BITSIZE (mode);
2792 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2795 if (size_tree)
2797 if (TREE_CODE (size_tree) != INTEGER_CST)
2798 mode = BLKmode, *pbitsize = -1;
2799 else
2800 *pbitsize = TREE_INT_CST_LOW (size_tree);
2803 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2804 and find the ultimate containing object. */
2806 *pbitpos = 0;
2808 while (1)
2810 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2812 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2813 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2814 : TREE_OPERAND (exp, 2));
2816 if (TREE_CODE (pos) == PLUS_EXPR)
2818 tree constant, var;
2819 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2821 constant = TREE_OPERAND (pos, 0);
2822 var = TREE_OPERAND (pos, 1);
2824 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2826 constant = TREE_OPERAND (pos, 1);
2827 var = TREE_OPERAND (pos, 0);
2829 else
2830 abort ();
2831 *pbitpos += TREE_INT_CST_LOW (constant);
2832 if (offset)
2833 offset = size_binop (PLUS_EXPR, offset,
2834 size_binop (FLOOR_DIV_EXPR, var,
2835 size_int (BITS_PER_UNIT)));
2836 else
2837 offset = size_binop (FLOOR_DIV_EXPR, var,
2838 size_int (BITS_PER_UNIT));
2840 else if (TREE_CODE (pos) == INTEGER_CST)
2841 *pbitpos += TREE_INT_CST_LOW (pos);
2842 else
2844 /* Assume here that the offset is a multiple of a unit.
2845 If not, there should be an explicitly added constant. */
2846 if (offset)
2847 offset = size_binop (PLUS_EXPR, offset,
2848 size_binop (FLOOR_DIV_EXPR, pos,
2849 size_int (BITS_PER_UNIT)));
2850 else
2851 offset = size_binop (FLOOR_DIV_EXPR, pos,
2852 size_int (BITS_PER_UNIT));
2856 else if (TREE_CODE (exp) == ARRAY_REF
2857 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
2858 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
2860 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
2861 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
2863 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2864 && ! ((TREE_CODE (exp) == NOP_EXPR
2865 || TREE_CODE (exp) == CONVERT_EXPR)
2866 && (TYPE_MODE (TREE_TYPE (exp))
2867 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2868 break;
2870 /* If any reference in the chain is volatile, the effect is volatile. */
2871 if (TREE_THIS_VOLATILE (exp))
2872 *pvolatilep = 1;
2873 exp = TREE_OPERAND (exp, 0);
2876 /* If this was a bit-field, see if there is a mode that allows direct
2877 access in case EXP is in memory. */
2878 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
2880 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2881 if (mode == BLKmode)
2882 mode = VOIDmode;
2885 *pmode = mode;
2886 *poffset = offset;
2887 #if 0
2888 /* We aren't finished fixing the callers to really handle nonzero offset. */
2889 if (offset != 0)
2890 abort ();
2891 #endif
2893 return exp;
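/* Worked example (hypothetical layout, 32-bit int): for a reference
   `s.a[2].f' to the types below, get_inner_reference returns the
   VAR_DECL for `s' with *PBITSIZE == 32, *PBITPOS == 2 * 32 == 64,
   and *POFFSET == 0, since every position is constant.  */
#if 0
struct example_inner { int f; };
struct example_outer { struct example_inner a[4]; };
#endif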
2896 /* Given an rtx VALUE that may contain additions and multiplications,
2897 return an equivalent value that just refers to a register or memory.
2898 This is done by generating instructions to perform the arithmetic
2899 and returning a pseudo-register containing the value. */
2902 force_operand (value, target)
2903 rtx value, target;
2905 register optab binoptab = 0;
2906 /* Use a temporary to force order of execution of calls to
2907 `force_operand'. */
2908 rtx tmp;
2909 register rtx op2;
2910 /* Use subtarget as the target for operand 0 of a binary operation. */
2911 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2913 if (GET_CODE (value) == PLUS)
2914 binoptab = add_optab;
2915 else if (GET_CODE (value) == MINUS)
2916 binoptab = sub_optab;
2917 else if (GET_CODE (value) == MULT)
2919 op2 = XEXP (value, 1);
2920 if (!CONSTANT_P (op2)
2921 && !(GET_CODE (op2) == REG && op2 != subtarget))
2922 subtarget = 0;
2923 tmp = force_operand (XEXP (value, 0), subtarget);
2924 return expand_mult (GET_MODE (value), tmp,
2925 force_operand (op2, NULL_RTX),
2926 target, 0);
2929 if (binoptab)
2931 op2 = XEXP (value, 1);
2932 if (!CONSTANT_P (op2)
2933 && !(GET_CODE (op2) == REG && op2 != subtarget))
2934 subtarget = 0;
2935 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2937 binoptab = add_optab;
2938 op2 = negate_rtx (GET_MODE (value), op2);
2941 /* Check for an addition with OP2 a constant integer and our first
2942 operand a PLUS of a virtual register and something else. In that
2943 case, we want to emit the sum of the virtual register and the
2944 constant first and then add the other value. This allows virtual
2945 register instantiation to simply modify the constant rather than
2946 creating another one around this addition. */
2947 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2948 && GET_CODE (XEXP (value, 0)) == PLUS
2949 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2950 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2951 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2953 rtx temp = expand_binop (GET_MODE (value), binoptab,
2954 XEXP (XEXP (value, 0), 0), op2,
2955 subtarget, 0, OPTAB_LIB_WIDEN);
2956 return expand_binop (GET_MODE (value), binoptab, temp,
2957 force_operand (XEXP (XEXP (value, 0), 1), 0),
2958 target, 0, OPTAB_LIB_WIDEN);
2961 tmp = force_operand (XEXP (value, 0), subtarget);
2962 return expand_binop (GET_MODE (value), binoptab, tmp,
2963 force_operand (op2, NULL_RTX),
2964 target, 0, OPTAB_LIB_WIDEN);
2965 /* We give UNSIGNEDP = 0 to expand_binop
2966 because the only operations we are expanding here are signed ones. */
2968 return value;
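/* Sketch of the virtual-register special case above (VIRT and OTHER are
   assumed names): for (plus (plus VIRT OTHER) (const_int 8)) the constant
   is combined with the virtual register first, so instantiation can fold
   it into the final frame offset instead of allocating a new constant.  */
#if 0
  temp = expand_binop (Pmode, add_optab, virt, GEN_INT (8),
		       subtarget, 0, OPTAB_LIB_WIDEN);
  return expand_binop (Pmode, add_optab, temp, other,
		       target, 0, OPTAB_LIB_WIDEN);
#endif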
2971 /* Subroutine of expand_expr:
2972 save the non-copied parts (LIST) of an expr (LHS), and return a list
2973 which can restore these values to their previous values,
2974 should something modify their storage. */
2976 static tree
2977 save_noncopied_parts (lhs, list)
2978 tree lhs;
2979 tree list;
2981 tree tail;
2982 tree parts = 0;
2984 for (tail = list; tail; tail = TREE_CHAIN (tail))
2985 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
2986 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
2987 else
2989 tree part = TREE_VALUE (tail);
2990 tree part_type = TREE_TYPE (part);
2991 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
2992 rtx target = assign_stack_temp (TYPE_MODE (part_type),
2993 int_size_in_bytes (part_type), 0);
2994 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
2995 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
2996 parts = tree_cons (to_be_saved,
2997 build (RTL_EXPR, part_type, NULL_TREE,
2998 (tree) target),
2999 parts);
3000 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3002 return parts;
3005 /* Subroutine of expand_expr:
3006 record the non-copied parts (LIST) of an expr (LHS), and return a list
3007 which specifies the initial values of these parts. */
3009 static tree
3010 init_noncopied_parts (lhs, list)
3011 tree lhs;
3012 tree list;
3014 tree tail;
3015 tree parts = 0;
3017 for (tail = list; tail; tail = TREE_CHAIN (tail))
3018 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3019 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3020 else
3022 tree part = TREE_VALUE (tail);
3023 tree part_type = TREE_TYPE (part);
3024 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3025 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3027 return parts;
3030 /* Subroutine of expand_expr: return nonzero iff there is no way that
3031 EXP can reference X, which is being modified. */
3033 static int
3034 safe_from_p (x, exp)
3035 rtx x;
3036 tree exp;
3038 rtx exp_rtl = 0;
3039 int i, nops;
3041 if (x == 0)
3042 return 1;
3044 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
3045 find the underlying pseudo. */
3046 if (GET_CODE (x) == SUBREG)
3048 x = SUBREG_REG (x);
3049 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3050 return 0;
3053 /* If X is a location in the outgoing argument area, it is always safe. */
3054 if (GET_CODE (x) == MEM
3055 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3056 || (GET_CODE (XEXP (x, 0)) == PLUS
3057 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3058 return 1;
3060 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3062 case 'd':
3063 exp_rtl = DECL_RTL (exp);
3064 break;
3066 case 'c':
3067 return 1;
3069 case 'x':
3070 if (TREE_CODE (exp) == TREE_LIST)
3071 return ((TREE_VALUE (exp) == 0
3072 || safe_from_p (x, TREE_VALUE (exp)))
3073 && (TREE_CHAIN (exp) == 0
3074 || safe_from_p (x, TREE_CHAIN (exp))));
3075 else
3076 return 0;
3078 case '1':
3079 return safe_from_p (x, TREE_OPERAND (exp, 0));
3081 case '2':
3082 case '<':
3083 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3084 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3086 case 'e':
3087 case 'r':
3088 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3089 the expression. If it is set, we conflict iff we are that rtx or
3090 both are in memory. Otherwise, we check all operands of the
3091 expression recursively. */
3093 switch (TREE_CODE (exp))
3095 case ADDR_EXPR:
3096 return staticp (TREE_OPERAND (exp, 0));
3098 case INDIRECT_REF:
3099 if (GET_CODE (x) == MEM)
3100 return 0;
3101 break;
3103 case CALL_EXPR:
3104 exp_rtl = CALL_EXPR_RTL (exp);
3105 if (exp_rtl == 0)
3107 /* Assume that the call will clobber all hard registers and
3108 all of memory. */
3109 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3110 || GET_CODE (x) == MEM)
3111 return 0;
3114 break;
3116 case RTL_EXPR:
3117 exp_rtl = RTL_EXPR_RTL (exp);
3118 if (exp_rtl == 0)
3119 /* We don't know what this can modify. */
3120 return 0;
3122 break;
3124 case WITH_CLEANUP_EXPR:
3125 exp_rtl = RTL_EXPR_RTL (exp);
3126 break;
3128 case SAVE_EXPR:
3129 exp_rtl = SAVE_EXPR_RTL (exp);
3130 break;
3132 case BIND_EXPR:
3133 /* The only operand we look at is operand 1. The rest aren't
3134 part of the expression. */
3135 return safe_from_p (x, TREE_OPERAND (exp, 1));
3137 case METHOD_CALL_EXPR:
3138 /* This takes an rtx argument, but shouldn't appear here. */
3139 abort ();
3142 /* If we have an rtx, we do not need to scan our operands. */
3143 if (exp_rtl)
3144 break;
3146 nops = tree_code_length[(int) TREE_CODE (exp)];
3147 for (i = 0; i < nops; i++)
3148 if (TREE_OPERAND (exp, i) != 0
3149 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3150 return 0;
3153 /* If we have an rtl, find any enclosed object. Then see if we conflict
3154 with it. */
3155 if (exp_rtl)
3157 if (GET_CODE (exp_rtl) == SUBREG)
3159 exp_rtl = SUBREG_REG (exp_rtl);
3160 if (GET_CODE (exp_rtl) == REG
3161 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3162 return 0;
3165 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3166 are memory and EXP is not readonly. */
3167 return ! (rtx_equal_p (x, exp_rtl)
3168 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3169 && ! TREE_READONLY (exp)));
3172 /* If we reach here, it is safe. */
3173 return 1;
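/* Typical use, mirroring the CONSTRUCTOR case of expand_expr below:
   a suggested target is discarded when the expression might read it.  */
#if 0
  if (target == 0 || ! safe_from_p (target, exp))
    target = gen_reg_rtx (mode);
#endif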
3176 /* Subroutine of expand_expr: return nonzero iff EXP is an
3177 expression whose type is statically determinable. */
3179 static int
3180 fixed_type_p (exp)
3181 tree exp;
3183 if (TREE_CODE (exp) == PARM_DECL
3184 || TREE_CODE (exp) == VAR_DECL
3185 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3186 || TREE_CODE (exp) == COMPONENT_REF
3187 || TREE_CODE (exp) == ARRAY_REF)
3188 return 1;
3189 return 0;
3192 /* expand_expr: generate code for computing expression EXP.
3193 An rtx for the computed value is returned. The value is never null.
3194 In the case of a void EXP, const0_rtx is returned.
3196 The value may be stored in TARGET if TARGET is nonzero.
3197 TARGET is just a suggestion; callers must assume that
3198 the rtx returned may not be the same as TARGET.
3200 If TARGET is CONST0_RTX, it means that the value will be ignored.
3202 If TMODE is not VOIDmode, it suggests generating the
3203 result in mode TMODE. But this is done only when convenient.
3204 Otherwise, TMODE is ignored and the value generated in its natural mode.
3205 TMODE is just a suggestion; callers must assume that
3206 the rtx returned may not have mode TMODE.
3208 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3209 with a constant address even if that address is not normally legitimate.
3210 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3212 If MODIFIER is EXPAND_SUM then when EXP is an addition
3213 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3214 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3215 products as above, or REG or MEM, or constant.
3216 Ordinarily in such cases we would output mul or add instructions
3217 and then return a pseudo reg containing the sum.
3219 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3220 it also marks a label as absolutely required (it can't be dead).
3221 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3222 This is used for outputting expressions used in initializers. */
3225 expand_expr (exp, target, tmode, modifier)
3226 register tree exp;
3227 rtx target;
3228 enum machine_mode tmode;
3229 enum expand_modifier modifier;
3231 register rtx op0, op1, temp;
3232 tree type = TREE_TYPE (exp);
3233 int unsignedp = TREE_UNSIGNED (type);
3234 register enum machine_mode mode = TYPE_MODE (type);
3235 register enum tree_code code = TREE_CODE (exp);
3236 optab this_optab;
3237 /* Use subtarget as the target for operand 0 of a binary operation. */
3238 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3239 rtx original_target = target;
3240 int ignore = target == const0_rtx;
3241 tree context;
3243 /* Don't use hard regs as subtargets, because the combiner
3244 can only handle pseudo regs. */
3245 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3246 subtarget = 0;
3247 /* Avoid subtargets inside loops,
3248 since they hide some invariant expressions. */
3249 if (preserve_subexpressions_p ())
3250 subtarget = 0;
3252 if (ignore) target = 0, original_target = 0;
3254 /* If will do cse, generate all results into pseudo registers
3255 since 1) that allows cse to find more things
3256 and 2) otherwise cse could produce an insn the machine
3257 cannot support. */
3259 if (! cse_not_expected && mode != BLKmode && target
3260 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3261 target = subtarget;
3263 /* Ensure we reference a volatile object even if value is ignored. */
3264 if (ignore && TREE_THIS_VOLATILE (exp)
3265 && mode != VOIDmode && mode != BLKmode)
3267 target = gen_reg_rtx (mode);
3268 temp = expand_expr (exp, target, VOIDmode, modifier);
3269 if (temp != target)
3270 emit_move_insn (target, temp);
3271 return target;
3274 switch (code)
3276 case LABEL_DECL:
3278 tree function = decl_function_context (exp);
3279 /* Handle using a label in a containing function. */
3280 if (function != current_function_decl && function != 0)
3282 struct function *p = find_function_data (function);
3283 /* Allocate in the memory associated with the function
3284 that the label is in. */
3285 push_obstacks (p->function_obstack,
3286 p->function_maybepermanent_obstack);
3288 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3289 label_rtx (exp), p->forced_labels);
3290 pop_obstacks ();
3292 else if (modifier == EXPAND_INITIALIZER)
3293 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3294 label_rtx (exp), forced_labels);
3295 temp = gen_rtx (MEM, FUNCTION_MODE,
3296 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3297 if (function != current_function_decl && function != 0)
3298 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3299 return temp;
3302 case PARM_DECL:
3303 if (DECL_RTL (exp) == 0)
3305 error_with_decl (exp, "prior parameter's size depends on `%s'");
3306 return CONST0_RTX (mode);
3309 case FUNCTION_DECL:
3310 case VAR_DECL:
3311 case RESULT_DECL:
3312 if (DECL_RTL (exp) == 0)
3313 abort ();
3314 /* Ensure variable marked as used
3315 even if it doesn't go through a parser. */
3316 TREE_USED (exp) = 1;
3317 /* Handle variables inherited from containing functions. */
3318 context = decl_function_context (exp);
3320 /* We treat inline_function_decl as an alias for the current function
3321 because that is the inline function whose vars, types, etc.
3322 are being merged into the current function.
3323 See expand_inline_function. */
3324 if (context != 0 && context != current_function_decl
3325 && context != inline_function_decl
3326 /* If var is static, we don't need a static chain to access it. */
3327 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3328 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3330 rtx addr;
3332 /* Mark as non-local and addressable. */
3333 DECL_NONLOCAL (exp) = 1;
3334 mark_addressable (exp);
3335 if (GET_CODE (DECL_RTL (exp)) != MEM)
3336 abort ();
3337 addr = XEXP (DECL_RTL (exp), 0);
3338 if (GET_CODE (addr) == MEM)
3339 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3340 else
3341 addr = fix_lexical_addr (addr, exp);
3342 return change_address (DECL_RTL (exp), mode, addr);
3345 /* This is the case of an array whose size is to be determined
3346 from its initializer, while the initializer is still being parsed.
3347 See expand_decl. */
3348 if (GET_CODE (DECL_RTL (exp)) == MEM
3349 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3350 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3351 XEXP (DECL_RTL (exp), 0));
3352 if (GET_CODE (DECL_RTL (exp)) == MEM
3353 && modifier != EXPAND_CONST_ADDRESS
3354 && modifier != EXPAND_SUM
3355 && modifier != EXPAND_INITIALIZER)
3357 /* DECL_RTL probably contains a constant address.
3358 On RISC machines where a constant address isn't valid,
3359 make some insns to get that address into a register. */
3360 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3361 || (flag_force_addr
3362 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3363 return change_address (DECL_RTL (exp), VOIDmode,
3364 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3367 /* If the mode of DECL_RTL does not match that of the decl, it
3368 must be a promoted value. We return a SUBREG of the wanted mode,
3369 but mark it so that we know that it was already extended. */
3371 if (GET_CODE (DECL_RTL (exp)) == REG
3372 && GET_MODE (DECL_RTL (exp)) != mode)
3374 enum machine_mode decl_mode = DECL_MODE (exp);
3376 /* Get the signedness used for this variable. Ensure we get the
3377 same mode we got when the variable was declared. */
3379 PROMOTE_MODE (decl_mode, unsignedp, type);
3381 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3382 abort ();
3384 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3385 SUBREG_PROMOTED_VAR_P (temp) = 1;
3386 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3387 return temp;
3390 return DECL_RTL (exp);
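/* Hypothetical illustration of the promoted-variable case above: on a
   target whose PROMOTE_MODE widens QImode values to SImode registers,
   a `char' variable is handed back as a SUBREG marked as promoted.  */
#if 0
  temp = gen_rtx (SUBREG, QImode, DECL_RTL (exp), 0); /* DECL_RTL: (reg:SI n) */
  SUBREG_PROMOTED_VAR_P (temp) = 1;
  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
#endif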
3392 case INTEGER_CST:
3393 return immed_double_const (TREE_INT_CST_LOW (exp),
3394 TREE_INT_CST_HIGH (exp),
3395 mode);
3397 case CONST_DECL:
3398 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3400 case REAL_CST:
3401 /* If optimized, generate immediate CONST_DOUBLE
3402 which will be turned into memory by reload if necessary.
3404 We used to force a register so that loop.c could see it. But
3405 this does not allow gen_* patterns to perform optimizations with
3406 the constants. It also produces two insns in cases like "x = 1.0;".
3407 On most machines, floating-point constants are not permitted in
3408 many insns, so we'd end up copying it to a register in any case.
3410 Now, we do the copying in expand_binop, if appropriate. */
3411 return immed_real_const (exp);
3413 case COMPLEX_CST:
3414 case STRING_CST:
3415 if (! TREE_CST_RTL (exp))
3416 output_constant_def (exp);
3418 /* TREE_CST_RTL probably contains a constant address.
3419 On RISC machines where a constant address isn't valid,
3420 make some insns to get that address into a register. */
3421 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3422 && modifier != EXPAND_CONST_ADDRESS
3423 && modifier != EXPAND_INITIALIZER
3424 && modifier != EXPAND_SUM
3425 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3426 return change_address (TREE_CST_RTL (exp), VOIDmode,
3427 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3428 return TREE_CST_RTL (exp);
3430 case SAVE_EXPR:
3431 context = decl_function_context (exp);
3432 /* We treat inline_function_decl as an alias for the current function
3433 because that is the inline function whose vars, types, etc.
3434 are being merged into the current function.
3435 See expand_inline_function. */
3436 if (context == current_function_decl || context == inline_function_decl)
3437 context = 0;
3439 /* If this is non-local, handle it. */
3440 if (context)
3442 temp = SAVE_EXPR_RTL (exp);
3443 if (temp && GET_CODE (temp) == REG)
3445 put_var_into_stack (exp);
3446 temp = SAVE_EXPR_RTL (exp);
3448 if (temp == 0 || GET_CODE (temp) != MEM)
3449 abort ();
3450 return change_address (temp, mode,
3451 fix_lexical_addr (XEXP (temp, 0), exp));
3453 if (SAVE_EXPR_RTL (exp) == 0)
3455 if (mode == BLKmode)
3456 temp
3457 = assign_stack_temp (mode,
3458 int_size_in_bytes (TREE_TYPE (exp)), 0);
3459 else
3461 enum machine_mode var_mode = mode;
3463 if (TREE_CODE (type) == INTEGER_TYPE
3464 || TREE_CODE (type) == ENUMERAL_TYPE
3465 || TREE_CODE (type) == BOOLEAN_TYPE
3466 || TREE_CODE (type) == CHAR_TYPE
3467 || TREE_CODE (type) == REAL_TYPE
3468 || TREE_CODE (type) == POINTER_TYPE
3469 || TREE_CODE (type) == OFFSET_TYPE)
3471 PROMOTE_MODE (var_mode, unsignedp, type);
3474 temp = gen_reg_rtx (var_mode);
3477 SAVE_EXPR_RTL (exp) = temp;
3478 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3479 if (!optimize && GET_CODE (temp) == REG)
3480 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3481 save_expr_regs);
3484 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3485 must be a promoted value. We return a SUBREG of the wanted mode,
3486 but mark it so that we know that it was already extended. Note
3487 that `unsignedp' was modified above in this case. */
3489 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3490 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3492 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3493 SUBREG_PROMOTED_VAR_P (temp) = 1;
3494 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3495 return temp;
3498 return SAVE_EXPR_RTL (exp);
3500 case EXIT_EXPR:
3501 /* Exit the current loop if the body-expression is true. */
3503 rtx label = gen_label_rtx ();
3504 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3505 expand_exit_loop (NULL_PTR);
3506 emit_label (label);
3508 return const0_rtx;
3510 case LOOP_EXPR:
3511 expand_start_loop (1);
3512 expand_expr_stmt (TREE_OPERAND (exp, 0));
3513 expand_end_loop ();
3515 return const0_rtx;
3517 case BIND_EXPR:
3519 tree vars = TREE_OPERAND (exp, 0);
3520 int vars_need_expansion = 0;
3522 /* Need to open a binding contour here because
3523 if there are any cleanups they must be contained here. */
3524 expand_start_bindings (0);
3526 /* Mark the corresponding BLOCK for output in its proper place. */
3527 if (TREE_OPERAND (exp, 2) != 0
3528 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3529 insert_block (TREE_OPERAND (exp, 2));
3531 /* If VARS have not yet been expanded, expand them now. */
3532 while (vars)
3534 if (DECL_RTL (vars) == 0)
3536 vars_need_expansion = 1;
3537 expand_decl (vars);
3539 expand_decl_init (vars);
3540 vars = TREE_CHAIN (vars);
3543 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3545 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3547 return temp;
3550 case RTL_EXPR:
3551 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3552 abort ();
3553 emit_insns (RTL_EXPR_SEQUENCE (exp));
3554 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3555 return RTL_EXPR_RTL (exp);
3557 case CONSTRUCTOR:
3558 /* All elts simple constants => refer to a constant in memory. But
3559 if this is a non-BLKmode mode, let it store a field at a time
3560 since that should make a CONST_INT or CONST_DOUBLE when we
3561 fold. */
3562 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3564 rtx constructor = output_constant_def (exp);
3565 if (modifier != EXPAND_CONST_ADDRESS
3566 && modifier != EXPAND_INITIALIZER
3567 && modifier != EXPAND_SUM
3568 && !memory_address_p (GET_MODE (constructor),
3569 XEXP (constructor, 0)))
3570 constructor = change_address (constructor, VOIDmode,
3571 XEXP (constructor, 0));
3572 return constructor;
3575 if (ignore)
3577 tree elt;
3578 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3579 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3580 return const0_rtx;
3582 else
3584 if (target == 0 || ! safe_from_p (target, exp))
3586 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3587 target = gen_reg_rtx (mode);
3588 else
3590 rtx safe_target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3591 if (target)
3592 MEM_IN_STRUCT_P (safe_target) = MEM_IN_STRUCT_P (target);
3593 target = safe_target;
3596 store_constructor (exp, target);
3597 return target;
3600 case INDIRECT_REF:
3602 tree exp1 = TREE_OPERAND (exp, 0);
3603 tree exp2;
3605 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
3606 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3607 This code has the same general effect as simply doing
3608 expand_expr on the save expr, except that the expression PTR
3609 is computed for use as a memory address. This means different
3610 code, suitable for indexing, may be generated. */
3611 if (TREE_CODE (exp1) == SAVE_EXPR
3612 && SAVE_EXPR_RTL (exp1) == 0
3613 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3614 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3615 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3617 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3618 VOIDmode, EXPAND_SUM);
3619 op0 = memory_address (mode, temp);
3620 op0 = copy_all_regs (op0);
3621 SAVE_EXPR_RTL (exp1) = op0;
3623 else
3625 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3626 op0 = memory_address (mode, op0);
3629 temp = gen_rtx (MEM, mode, op0);
3630 /* If address was computed by addition,
3631 mark this as an element of an aggregate. */
3632 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3633 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3634 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3635 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3636 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3637 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3638 || (TREE_CODE (exp1) == ADDR_EXPR
3639 && (exp2 = TREE_OPERAND (exp1, 0))
3640 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3641 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3642 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3643 MEM_IN_STRUCT_P (temp) = 1;
3644 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3645 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3646 a location is accessed through a pointer to const does not mean
3647 that the value there can never change. */
3648 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3649 #endif
3650 return temp;
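/* Hypothetical source fragment producing the SAVE_EXPR address form
   handled above: the pointer P is wrapped in a SAVE_EXPR that is then
   used as the address of the INDIRECT_REF on both sides of the +=.  */
#if 0
static void
example_incr (p)
     int *p;
{
  *p += 1;
}
#endif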
3653 case ARRAY_REF:
3654 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3655 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3657 /* Nonconstant array index or nonconstant element size.
3658 Generate the tree for *(&array+index) and expand that,
3659 except do it in a language-independent way
3660 and don't complain about non-lvalue arrays.
3661 `mark_addressable' should already have been called
3662 for any array for which this case will be reached. */
3664 /* Don't forget the const or volatile flag from the array element. */
3665 tree variant_type = build_type_variant (type,
3666 TREE_READONLY (exp),
3667 TREE_THIS_VOLATILE (exp));
3668 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3669 TREE_OPERAND (exp, 0));
3670 tree index = TREE_OPERAND (exp, 1);
3671 tree elt;
3673 /* Convert the integer argument to a type the same size as a pointer
3674 so the multiply won't overflow spuriously. */
3675 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3676 index = convert (type_for_size (POINTER_SIZE, 0), index);
3678 /* Don't think the address has side effects
3679 just because the array does.
3680 (In some cases the address might have side effects,
3681 and we fail to record that fact here. However, it should not
3682 matter, since expand_expr should not care.) */
3683 TREE_SIDE_EFFECTS (array_adr) = 0;
3685 elt = build1 (INDIRECT_REF, type,
3686 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3687 array_adr,
3688 fold (build (MULT_EXPR,
3689 TYPE_POINTER_TO (variant_type),
3690 index, size_in_bytes (type))))));
3692 /* Volatility, etc., of new expression is same as old expression. */
3693 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3694 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3695 TREE_READONLY (elt) = TREE_READONLY (exp);
3697 return expand_expr (elt, target, tmode, modifier);
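/* Worked example of the rewrite above (hypothetical): for `a[i]' with
   4-byte ints and a nonconstant index, the tree built is essentially
   *(&a + i * 4), with I first widened to pointer width.  */
#if 0
static int
example_index (a, i)
     int *a;
     long i;			/* index converted to POINTER_SIZE first */
{
  return *(int *) ((char *) a + i * sizeof (int));
}
#endif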
3700 /* Fold an expression like: "foo"[2].
3701 This is not done in fold so it won't happen inside &. */
3703 int i;
3704 tree arg0 = TREE_OPERAND (exp, 0);
3705 tree arg1 = TREE_OPERAND (exp, 1);
3707 if (TREE_CODE (arg0) == STRING_CST
3708 && TREE_CODE (arg1) == INTEGER_CST
3709 && !TREE_INT_CST_HIGH (arg1)
3710 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3712 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3714 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3715 TREE_TYPE (exp) = integer_type_node;
3716 return expand_expr (exp, target, tmode, modifier);
3718 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3720 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3721 TREE_TYPE (exp) = integer_type_node;
3722 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
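/* Illustration of the fold above (hypothetical): the element is read
   straight out of the STRING_CST, so no memory reference is emitted.  */
#if 0
static int
example_fold ()
{
  return "foo"[2];		/* expanded as the integer 'o' */
}
#endif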
3727 /* If this is a constant index into a constant array,
3728 just get the value from the array. Handle both the cases when
3729 we have an explicit constructor and when our operand is a variable
3730 that was declared const. */
3732 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3733 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3735 tree index = fold (TREE_OPERAND (exp, 1));
3736 if (TREE_CODE (index) == INTEGER_CST
3737 && TREE_INT_CST_HIGH (index) == 0)
3739 int i = TREE_INT_CST_LOW (index);
3740 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3742 while (elem && i--)
3743 elem = TREE_CHAIN (elem);
3744 if (elem)
3745 return expand_expr (fold (TREE_VALUE (elem)), target,
3746 tmode, modifier);
3750 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3751 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3752 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3753 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3754 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3755 && optimize >= 1
3756 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3757 != ERROR_MARK))
3759 tree index = fold (TREE_OPERAND (exp, 1));
3760 if (TREE_CODE (index) == INTEGER_CST
3761 && TREE_INT_CST_HIGH (index) == 0)
3763 int i = TREE_INT_CST_LOW (index);
3764 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
3766 if (TREE_CODE (init) == CONSTRUCTOR)
3768 tree elem = CONSTRUCTOR_ELTS (init);
3770 while (elem && i--)
3771 elem = TREE_CHAIN (elem);
3772 if (elem)
3773 return expand_expr (fold (TREE_VALUE (elem)), target,
3774 tmode, modifier);
3776 else if (TREE_CODE (init) == STRING_CST
3777 && i < TREE_STRING_LENGTH (init))
3779 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3780 return convert_to_mode (mode, temp, 0);
3784 /* Treat array-ref with constant index as a component-ref. */
3786 case COMPONENT_REF:
3787 case BIT_FIELD_REF:
3788 /* If the operand is a CONSTRUCTOR, we can just extract the
3789 appropriate field if it is present. */
3790 if (code != ARRAY_REF
3791 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3793 tree elt;
3795 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3796 elt = TREE_CHAIN (elt))
3797 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3798 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3802 enum machine_mode mode1;
3803 int bitsize;
3804 int bitpos;
3805 tree offset;
3806 int volatilep = 0;
3807 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3808 &mode1, &unsignedp, &volatilep);
3810 /* In some cases, we will be offsetting OP0's address by a constant.
3811 So get it as a sum, if possible. If we will be using it
3812 directly in an insn, we validate it. */
3813 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3815 /* If this is a constant, put it into a register if it is a
3816 legitimate constant and memory if it isn't. */
3817 if (CONSTANT_P (op0))
3819 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3820 if (LEGITIMATE_CONSTANT_P (op0))
3821 op0 = force_reg (mode, op0);
3822 else
3823 op0 = validize_mem (force_const_mem (mode, op0));
3826 if (offset != 0)
3828 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3830 if (GET_CODE (op0) != MEM)
3831 abort ();
3832 op0 = change_address (op0, VOIDmode,
3833 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3834 force_reg (Pmode, offset_rtx)));
3837 /* Don't forget about volatility even if this is a bitfield. */
3838 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3840 op0 = copy_rtx (op0);
3841 MEM_VOLATILE_P (op0) = 1;
3844 if (mode1 == VOIDmode
3845 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3846 && modifier != EXPAND_CONST_ADDRESS
3847 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3848 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3850 /* In cases where an aligned union has an unaligned object
3851 as a field, we might be extracting a BLKmode value from
3852 an integer-mode (e.g., SImode) object. Handle this case
3853 by doing the extract into an object as wide as the field
3854 (which we know to be the width of a basic mode), then
3855 storing into memory, and changing the mode to BLKmode. */
3856 enum machine_mode ext_mode = mode;
3858 if (ext_mode == BLKmode)
3859 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3861 if (ext_mode == BLKmode)
3862 abort ();
3864 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3865 unsignedp, target, ext_mode, ext_mode,
3866 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3867 int_size_in_bytes (TREE_TYPE (tem)));
3868 if (mode == BLKmode)
3870 rtx new = assign_stack_temp (ext_mode,
3871 bitsize / BITS_PER_UNIT, 0);
3873 emit_move_insn (new, op0);
3874 op0 = copy_rtx (new);
3875 PUT_MODE (op0, BLKmode);
3878 return op0;
3881 /* Get a reference to just this component. */
3882 if (modifier == EXPAND_CONST_ADDRESS
3883 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3884 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3885 (bitpos / BITS_PER_UNIT)));
3886 else
3887 op0 = change_address (op0, mode1,
3888 plus_constant (XEXP (op0, 0),
3889 (bitpos / BITS_PER_UNIT)));
3890 MEM_IN_STRUCT_P (op0) = 1;
3891 MEM_VOLATILE_P (op0) |= volatilep;
3892 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3893 return op0;
3894 if (target == 0)
3895 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3896 convert_move (target, op0, unsignedp);
3897 return target;
3900 case OFFSET_REF:
3902 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3903 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3904 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3905 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3906 MEM_IN_STRUCT_P (temp) = 1;
3907 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) || flag_volatile;
3908 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3909 a location is accessed through a pointer to const does not mean
3910 that the value there can never change. */
3911 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3912 #endif
3913 return temp;
3916 /* Intended for a reference to a buffer of a file-object in Pascal.
3917 But it's not certain that a special tree code will really be
3918 necessary for these. INDIRECT_REF might work for them. */
3919 case BUFFER_REF:
3920 abort ();
3922 /* IN_EXPR: Inlined pascal set IN expression.
3924 Algorithm:
3925 rlo = set_low - (set_low%bits_per_word);
3926 the_word = set [ (index - rlo)/bits_per_word ];
3927 bit_index = index % bits_per_word;
3928 bitmask = 1 << bit_index;
3929 return !!(the_word & bitmask); */
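/* Worked example: with set_low == 9, index == 10 and 8 bits per
word, rlo = 9 - (9 % 8) = 8, the_word = set[(10 - 8) / 8] = set[0],
bit_index = 10 % 8 = 2, bitmask = 1 << 2 = 4, and the result
is !!(set[0] & 4). */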
3930 case IN_EXPR:
3931 preexpand_calls (exp);
3933 tree set = TREE_OPERAND (exp, 0);
3934 tree index = TREE_OPERAND (exp, 1);
3935 tree set_type = TREE_TYPE (set);
3937 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3938 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
3940 rtx index_val;
3941 rtx lo_r;
3942 rtx hi_r;
3943 rtx rlow;
3944 rtx diff, quo, rem, addr, bit, result;
3945 rtx setval, setaddr;
3946 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
3948 if (target == 0)
3949 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
3951 /* If domain is empty, answer is no. */
3952 if (tree_int_cst_lt (set_high_bound, set_low_bound))
3953 return const0_rtx;
3955 index_val = expand_expr (index, 0, VOIDmode, 0);
3956 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
3957 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
3958 setval = expand_expr (set, 0, VOIDmode, 0);
3959 setaddr = XEXP (setval, 0);
3961 /* Compare index against bounds, if they are constant. */
3962 if (GET_CODE (index_val) == CONST_INT
3963 && GET_CODE (lo_r) == CONST_INT)
3965 if (INTVAL (index_val) < INTVAL (lo_r))
3966 return const0_rtx;
3969 if (GET_CODE (index_val) == CONST_INT
3970 && GET_CODE (hi_r) == CONST_INT)
3972 if (INTVAL (hi_r) < INTVAL (index_val))
3973 return const0_rtx;
3976 /* If we get here, we have to generate the code for both cases
3977 (in range and out of range). */
3979 op0 = gen_label_rtx ();
3980 op1 = gen_label_rtx ();
3982 if (! (GET_CODE (index_val) == CONST_INT
3983 && GET_CODE (lo_r) == CONST_INT))
3985 emit_cmp_insn (index_val, lo_r, LT, 0, GET_MODE (index_val), 0, 0);
3986 emit_jump_insn (gen_blt (op1));
3989 if (! (GET_CODE (index_val) == CONST_INT
3990 && GET_CODE (hi_r) == CONST_INT))
3992 emit_cmp_insn (index_val, hi_r, GT, 0, GET_MODE (index_val), 0, 0);
3993 emit_jump_insn (gen_bgt (op1));
3996 /* Calculate the element number of bit zero in the first word
3997 of the set. */
3998 if (GET_CODE (lo_r) == CONST_INT)
3999 rlow = gen_rtx (CONST_INT, VOIDmode,
4000 INTVAL (lo_r) & ~ (BITS_PER_UNIT - 1));
4001 else
4002 rlow = expand_binop (index_mode, and_optab,
4003 lo_r, gen_rtx (CONST_INT, VOIDmode,
4004 ~ (BITS_PER_UNIT - 1)),
4005 0, 0, OPTAB_LIB_WIDEN);
4007 diff = expand_binop (index_mode, sub_optab,
4008 index_val, rlow, 0, 0, OPTAB_LIB_WIDEN);
4010 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4011 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4012 0, 0);
4013 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4014 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4015 0, 0);
4016 addr = memory_address (byte_mode,
4017 expand_binop (index_mode, add_optab,
4018 quo, setaddr, NULL_RTX, 0, OPTAB_LIB_WIDEN));
4019 /* Extract the bit we want to examine. */
4020 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4021 gen_rtx (MEM, byte_mode, addr), rem, 0, 1);
4022 result = expand_binop (SImode, and_optab, bit, const1_rtx, target,
4023 1, OPTAB_LIB_WIDEN);
4024 emit_move_insn (target, result);
4026 /* Output the code to handle the out-of-range case. */
4027 emit_jump (op0);
4028 emit_label (op1);
4029 emit_move_insn (target, const0_rtx);
4030 emit_label (op0);
4031 return target;
4034 case WITH_CLEANUP_EXPR:
4035 if (RTL_EXPR_RTL (exp) == 0)
4037 RTL_EXPR_RTL (exp)
4038 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4039 cleanups_this_call
4040 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4041 /* That's it for this cleanup. */
4042 TREE_OPERAND (exp, 2) = 0;
4044 return RTL_EXPR_RTL (exp);
4046 case CALL_EXPR:
4047 /* Check for a built-in function. */
4048 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4049 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4050 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4051 return expand_builtin (exp, target, subtarget, tmode, ignore);
4052 /* If this call was expanded already by preexpand_calls,
4053 just return the result we got. */
4054 if (CALL_EXPR_RTL (exp) != 0)
4055 return CALL_EXPR_RTL (exp);
4056 return expand_call (exp, target, ignore);
4058 case NON_LVALUE_EXPR:
4059 case NOP_EXPR:
4060 case CONVERT_EXPR:
4061 case REFERENCE_EXPR:
4062 if (TREE_CODE (type) == VOID_TYPE || ignore)
4064 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4065 return const0_rtx;
4067 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4068 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4069 if (TREE_CODE (type) == UNION_TYPE)
4071 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4072 if (target == 0)
4074 if (mode == BLKmode)
4076 if (TYPE_SIZE (type) == 0
4077 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4078 abort ();
4079 target = assign_stack_temp (BLKmode,
4080 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4081 + BITS_PER_UNIT - 1)
4082 / BITS_PER_UNIT, 0);
4084 else
4085 target = gen_reg_rtx (mode);
4087 if (GET_CODE (target) == MEM)
4088 /* Store data into beginning of memory target. */
4089 store_expr (TREE_OPERAND (exp, 0),
4090 change_address (target, TYPE_MODE (valtype), 0), 0);
4092 else if (GET_CODE (target) == REG)
4093 /* Store this field into a union of the proper type. */
4094 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4095 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4096 VOIDmode, 0, 1,
4097 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4098 else
4099 abort ();
4101 /* Return the entire union. */
4102 return target;
4104 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4105 if (GET_MODE (op0) == mode || GET_MODE (op0) == VOIDmode)
4106 return op0;
4107 if (modifier == EXPAND_INITIALIZER)
4108 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4109 if (flag_force_mem && GET_CODE (op0) == MEM)
4110 op0 = copy_to_reg (op0);
4112 if (target == 0)
4113 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4114 else
4115 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4116 return target;
4118 case PLUS_EXPR:
4119 /* We come here from MINUS_EXPR when the second operand is a constant. */
4120 plus_expr:
4121 this_optab = add_optab;
4123 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4124 something else, make sure we add the register to the constant and
4125 then to the other thing. This case can occur during strength
4126 reduction and doing it this way will produce better code if the
4127 frame pointer or argument pointer is eliminated.
4129 fold-const.c will ensure that the constant is always in the inner
4130 PLUS_EXPR, so the only case we need to do anything about is if
4131 sp, ap, or fp is our second argument, in which case we must swap
4132 the innermost first argument and our second argument. */
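/* E.g., (X + 4) + FP is rearranged here into (FP + 4) + X, so that
if FP is later eliminated in favor of a constant offset, the two
constants can be combined. */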
4134 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4135 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4136 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4137 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4138 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4139 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4141 tree t = TREE_OPERAND (exp, 1);
4143 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4144 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4147 /* If the result is to be Pmode and we are adding an integer to
4148 something, we might be forming a constant. So try to use
4149 plus_constant. If it produces a sum and we can't accept it,
4150 use force_operand. This allows P = &ARR[const] to generate
4151 efficient code on machines where a SYMBOL_REF is not a valid
4152 address.
4154 If this is an EXPAND_SUM call, always return the sum. */
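/* E.g., for P = &ARR[3] with 4-byte elements and ARR a global array,
this can produce the single constant address
(const (plus (symbol_ref ARR) (const_int 12))) instead of an
explicit addition at run time. */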
4155 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4156 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4157 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4158 || mode == Pmode))
4160 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4161 EXPAND_SUM);
4162 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4163 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4164 op1 = force_operand (op1, target);
4165 return op1;
4168 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4169 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4170 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4171 || mode == Pmode))
4173 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4174 EXPAND_SUM);
4175 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4176 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4177 op0 = force_operand (op0, target);
4178 return op0;
4181 /* No sense saving up arithmetic to be done
4182 if it's all in the wrong mode to form part of an address.
4183 And force_operand won't know whether to sign-extend or
4184 zero-extend. */
4185 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4186 || mode != Pmode) goto binop;
4188 preexpand_calls (exp);
4189 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4190 subtarget = 0;
4192 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4193 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4195 /* Make sure any term that's a sum with a constant comes last. */
4196 if (GET_CODE (op0) == PLUS
4197 && CONSTANT_P (XEXP (op0, 1)))
4199 temp = op0;
4200 op0 = op1;
4201 op1 = temp;
4203 /* If adding to a sum including a constant,
4204 associate it to put the constant outside. */
4205 if (GET_CODE (op1) == PLUS
4206 && CONSTANT_P (XEXP (op1, 1)))
4208 rtx constant_term = const0_rtx;
4210 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4211 if (temp != 0)
4212 op0 = temp;
4213 /* Ensure that MULT comes first if there is one. */
4214 else if (GET_CODE (op0) == MULT)
4215 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4216 else
4217 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4219 /* Let's also eliminate constants from op0 if possible. */
4220 op0 = eliminate_constant_term (op0, &constant_term);
4222 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4223 their sum should be a constant. Form it into OP1, since the
4224 result we want will then be OP0 + OP1. */
4226 temp = simplify_binary_operation (PLUS, mode, constant_term,
4227 XEXP (op1, 1));
4228 if (temp != 0)
4229 op1 = temp;
4230 else
4231 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4234 /* Put a constant term last and put a multiplication first. */
4235 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4236 temp = op1, op1 = op0, op0 = temp;
4238 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4239 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4241 case MINUS_EXPR:
4242 /* Handle difference of two symbolic constants,
4243 for the sake of an initializer. */
4244 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4245 && really_constant_p (TREE_OPERAND (exp, 0))
4246 && really_constant_p (TREE_OPERAND (exp, 1)))
4248 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4249 VOIDmode, modifier);
4250 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4251 VOIDmode, modifier);
4252 return gen_rtx (MINUS, mode, op0, op1);
4254 /* Convert A - const to A + (-const). */
4255 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4257 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4258 fold (build1 (NEGATE_EXPR, type,
4259 TREE_OPERAND (exp, 1))));
4260 goto plus_expr;
4262 this_optab = sub_optab;
4263 goto binop;
4265 case MULT_EXPR:
4266 preexpand_calls (exp);
4267 /* If first operand is constant, swap them.
4268 Thus the following special case checks need only
4269 check the second operand. */
4270 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4272 register tree t1 = TREE_OPERAND (exp, 0);
4273 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4274 TREE_OPERAND (exp, 1) = t1;
4277 /* Attempt to return something suitable for generating an
4278 indexed address, for machines that support that. */
4280 if (modifier == EXPAND_SUM && mode == Pmode
4281 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4282 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4284 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4286 /* Apply distributive law if OP0 is x+c. */
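/* E.g., (X + 4) * 3 becomes (plus (mult X 3) (const_int 12)),
which is more likely to be usable as an indexed address. */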
4287 if (GET_CODE (op0) == PLUS
4288 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4289 return gen_rtx (PLUS, mode,
4290 gen_rtx (MULT, mode, XEXP (op0, 0),
4291 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4292 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4293 * INTVAL (XEXP (op0, 1))));
4295 if (GET_CODE (op0) != REG)
4296 op0 = force_operand (op0, NULL_RTX);
4297 if (GET_CODE (op0) != REG)
4298 op0 = copy_to_mode_reg (mode, op0);
4300 return gen_rtx (MULT, mode, op0,
4301 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4304 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4305 subtarget = 0;
4307 /* Check for multiplying things that have been extended
4308 from a narrower type. If this machine supports multiplying
4309 in that narrower type with a result in the desired type,
4310 do it that way, and avoid the explicit type-conversion. */
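/* E.g., on a machine with a widening multiply instruction such as
mulhisi3, (int) A * (int) B with A and B of a 16-bit type is done
as one widening multiply instead of two sign-extensions and a full
SImode multiply. */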
4311 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4312 && TREE_CODE (type) == INTEGER_TYPE
4313 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4314 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4315 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4316 && int_fits_type_p (TREE_OPERAND (exp, 1),
4317 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4318 /* Don't use a widening multiply if a shift will do. */
4319 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4320 > HOST_BITS_PER_WIDE_INT)
4321 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4323 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4324 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4326 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4327 /* If both operands are extended, they must either both
4328 be zero-extended or both be sign-extended. */
4329 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4331 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4333 enum machine_mode innermode
4334 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4335 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4336 ? umul_widen_optab : smul_widen_optab);
4337 if (mode == GET_MODE_WIDER_MODE (innermode)
4338 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4340 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4341 NULL_RTX, VOIDmode, 0);
4342 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4343 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4344 VOIDmode, 0);
4345 else
4346 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4347 NULL_RTX, VOIDmode, 0);
4348 goto binop2;
4351 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4352 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4353 return expand_mult (mode, op0, op1, target, unsignedp);
4355 case TRUNC_DIV_EXPR:
4356 case FLOOR_DIV_EXPR:
4357 case CEIL_DIV_EXPR:
4358 case ROUND_DIV_EXPR:
4359 case EXACT_DIV_EXPR:
4360 preexpand_calls (exp);
4361 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4362 subtarget = 0;
4363 /* Possible optimization: compute the dividend with EXPAND_SUM
4364 then, if the divisor is constant, optimize the case
4365 where some terms of the dividend have coeffs divisible by it. */
4366 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4367 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4368 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4370 case RDIV_EXPR:
4371 this_optab = flodiv_optab;
4372 goto binop;
4374 case TRUNC_MOD_EXPR:
4375 case FLOOR_MOD_EXPR:
4376 case CEIL_MOD_EXPR:
4377 case ROUND_MOD_EXPR:
4378 preexpand_calls (exp);
4379 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4380 subtarget = 0;
4381 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4382 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4383 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4385 case FIX_ROUND_EXPR:
4386 case FIX_FLOOR_EXPR:
4387 case FIX_CEIL_EXPR:
4388 abort (); /* Not used for C. */
4390 case FIX_TRUNC_EXPR:
4391 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4392 if (target == 0)
4393 target = gen_reg_rtx (mode);
4394 expand_fix (target, op0, unsignedp);
4395 return target;
4397 case FLOAT_EXPR:
4398 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4399 if (target == 0)
4400 target = gen_reg_rtx (mode);
4401 /* expand_float can't figure out what to do if FROM has VOIDmode.
4402 So give it the correct mode. With -O, cse will optimize this. */
4403 if (GET_MODE (op0) == VOIDmode)
4404 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4405 op0);
4406 expand_float (target, op0,
4407 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4408 return target;
4410 case NEGATE_EXPR:
4411 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4412 temp = expand_unop (mode, neg_optab, op0, target, 0);
4413 if (temp == 0)
4414 abort ();
4415 return temp;
4417 case ABS_EXPR:
4418 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4420 /* Unsigned abs is simply the operand. Testing here means we don't
4421 risk generating incorrect code below. */
4422 if (TREE_UNSIGNED (type))
4423 return op0;
4425 /* First try to do it with a special abs instruction. */
4426 temp = expand_unop (mode, abs_optab, op0, target, 0);
4427 if (temp != 0)
4428 return temp;
4430 /* If this machine has expensive jumps, we can do integer absolute
4431 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4432 where W is the width of MODE. */
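/* E.g., for W == 32 and X == -5: (signed) X >> 31 is -1 (all ones),
(-1 ^ -5) is 4, and 4 - (-1) is 5. For nonnegative X the shift
yields 0 and X passes through unchanged. */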
4434 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4436 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4437 size_int (GET_MODE_BITSIZE (mode) - 1),
4438 NULL_RTX, 0);
4440 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4441 OPTAB_LIB_WIDEN);
4442 if (temp != 0)
4443 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4444 OPTAB_LIB_WIDEN);
4446 if (temp != 0)
4447 return temp;
4450 /* If that does not win, use conditional jump and negate. */
4451 target = original_target;
4452 temp = gen_label_rtx ();
4453 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4454 || (GET_CODE (target) == REG
4455 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4456 target = gen_reg_rtx (mode);
4457 emit_move_insn (target, op0);
4458 emit_cmp_insn (target,
4459 expand_expr (convert (type, integer_zero_node),
4460 NULL_RTX, VOIDmode, 0),
4461 GE, NULL_RTX, mode, 0, 0);
4462 NO_DEFER_POP;
4463 emit_jump_insn (gen_bge (temp));
4464 op0 = expand_unop (mode, neg_optab, target, target, 0);
4465 if (op0 != target)
4466 emit_move_insn (target, op0);
4467 emit_label (temp);
4468 OK_DEFER_POP;
4469 return target;
4471 case MAX_EXPR:
4472 case MIN_EXPR:
4473 target = original_target;
4474 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4475 || (GET_CODE (target) == REG
4476 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4477 target = gen_reg_rtx (mode);
4478 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4479 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4481 /* First try to do it with a special MIN or MAX instruction.
4482 If that does not win, use a conditional jump to select the proper
4483 value. */
4484 this_optab = (TREE_UNSIGNED (type)
4485 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4486 : (code == MIN_EXPR ? smin_optab : smax_optab));
4488 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4489 OPTAB_WIDEN);
4490 if (temp != 0)
4491 return temp;
4493 if (target != op0)
4494 emit_move_insn (target, op0);
4495 op0 = gen_label_rtx ();
4496 if (code == MAX_EXPR)
4497 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4498 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4499 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4500 else
4501 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4502 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4503 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4504 if (temp == const0_rtx)
4505 emit_move_insn (target, op1);
4506 else if (temp != const_true_rtx)
4508 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4509 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4510 else
4511 abort ();
4512 emit_move_insn (target, op1);
4514 emit_label (op0);
4515 return target;
4517 /* ??? Can optimize when the operand of this is a bitwise operation,
4518 by using a different bitwise operation. */
4519 case BIT_NOT_EXPR:
4520 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4521 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4522 if (temp == 0)
4523 abort ();
4524 return temp;
4526 case FFS_EXPR:
4527 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4528 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4529 if (temp == 0)
4530 abort ();
4531 return temp;
4533 /* ??? Can optimize bitwise operations with one arg constant.
4534 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4535 and (a bitwise1 b) bitwise2 b (etc)
4536 but that is probably not worthwhile. */
4538 /* BIT_AND_EXPR is for bitwise anding.
4539 TRUTH_AND_EXPR is for anding two boolean values
4540 when we want in all cases to compute both of them.
4541 In general it is fastest to do TRUTH_AND_EXPR by
4542 computing both operands as actual zero-or-1 values
4543 and then bitwise anding. In cases where there cannot
4544 be any side effects, better code would be made by
4545 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4546 but the question is how to recognize those cases. */
4548 case TRUTH_AND_EXPR:
4549 case BIT_AND_EXPR:
4550 this_optab = and_optab;
4551 goto binop;
4553 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4554 case TRUTH_OR_EXPR:
4555 case BIT_IOR_EXPR:
4556 this_optab = ior_optab;
4557 goto binop;
4559 case BIT_XOR_EXPR:
4560 this_optab = xor_optab;
4561 goto binop;
4563 case LSHIFT_EXPR:
4564 case RSHIFT_EXPR:
4565 case LROTATE_EXPR:
4566 case RROTATE_EXPR:
4567 preexpand_calls (exp);
4568 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4569 subtarget = 0;
4570 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4571 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4572 unsignedp);
4574 /* Could determine the answer when only additive constants differ.
4575 Also, the addition of one can be handled by changing the condition. */
4576 case LT_EXPR:
4577 case LE_EXPR:
4578 case GT_EXPR:
4579 case GE_EXPR:
4580 case EQ_EXPR:
4581 case NE_EXPR:
4582 preexpand_calls (exp);
4583 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4584 if (temp != 0)
4585 return temp;
4586 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4587 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4588 && original_target
4589 && GET_CODE (original_target) == REG
4590 && (GET_MODE (original_target)
4591 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4593 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4594 if (temp != original_target)
4595 temp = copy_to_reg (temp);
4596 op1 = gen_label_rtx ();
4597 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4598 GET_MODE (temp), unsignedp, 0);
4599 emit_jump_insn (gen_beq (op1));
4600 emit_move_insn (temp, const1_rtx);
4601 emit_label (op1);
4602 return temp;
4604 /* If no set-flag instruction, must generate a conditional
4605 store into a temporary variable. Drop through
4606 and handle this like && and ||. */
4608 case TRUTH_ANDIF_EXPR:
4609 case TRUTH_ORIF_EXPR:
4610 if (target == 0 || ! safe_from_p (target, exp)
4611 /* Make sure we don't have a hard reg (such as function's return
4612 value) live across basic blocks, if not optimizing. */
4613 || (!optimize && GET_CODE (target) == REG
4614 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4615 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4616 emit_clr_insn (target);
4617 op1 = gen_label_rtx ();
4618 jumpifnot (exp, op1);
4619 emit_0_to_1_insn (target);
4620 emit_label (op1);
4621 return target;
4623 case TRUTH_NOT_EXPR:
4624 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4625 /* The parser is careful to generate TRUTH_NOT_EXPR
4626 only with operands that are always zero or one. */
4627 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4628 target, 1, OPTAB_LIB_WIDEN);
4629 if (temp == 0)
4630 abort ();
4631 return temp;
4633 case COMPOUND_EXPR:
4634 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4635 emit_queue ();
4636 return expand_expr (TREE_OPERAND (exp, 1),
4637 (ignore ? const0_rtx : target),
4638 VOIDmode, 0);
4640 case COND_EXPR:
4642 /* Note that COND_EXPRs whose type is a structure or union
4643 are required to be constructed to contain assignments of
4644 a temporary variable, so that we can evaluate them here
4645 for side effect only. If type is void, we must do likewise. */
4647 /* If an arm of the branch requires a cleanup,
4648 only that cleanup is performed. */
4650 tree singleton = 0;
4651 tree binary_op = 0, unary_op = 0;
4652 tree old_cleanups = cleanups_this_call;
4653 cleanups_this_call = 0;
4655 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4656 convert it to our mode, if necessary. */
4657 if (integer_onep (TREE_OPERAND (exp, 1))
4658 && integer_zerop (TREE_OPERAND (exp, 2))
4659 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4661 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4662 if (GET_MODE (op0) == mode)
4663 return op0;
4664 if (target == 0)
4665 target = gen_reg_rtx (mode);
4666 convert_move (target, op0, unsignedp);
4667 return target;
4670 /* If we are not to produce a result, we have no target. Otherwise,
4671 if a target was specified use it; it will not be used as an
4672 intermediate target unless it is safe. If no target, use a
4673 temporary. */
4675 if (mode == VOIDmode || ignore)
4676 temp = 0;
4677 else if (original_target
4678 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4679 temp = original_target;
4680 else if (mode == BLKmode)
4682 if (TYPE_SIZE (type) == 0
4683 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4684 abort ();
4685 temp = assign_stack_temp (BLKmode,
4686 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4687 + BITS_PER_UNIT - 1)
4688 / BITS_PER_UNIT, 0);
4690 else
4691 temp = gen_reg_rtx (mode);
4693 /* Check for X ? A + B : A. If we have this, we can copy
4694 A to the output and conditionally add B. Similarly for unary
4695 operations. Don't do this if X has side-effects because
4696 those side effects might affect A or B and the "?" operation is
4697 a sequence point in ANSI. (We test for side effects later.) */
4699 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4700 && operand_equal_p (TREE_OPERAND (exp, 2),
4701 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4702 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4703 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4704 && operand_equal_p (TREE_OPERAND (exp, 1),
4705 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4706 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4707 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4708 && operand_equal_p (TREE_OPERAND (exp, 2),
4709 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4710 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4711 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4712 && operand_equal_p (TREE_OPERAND (exp, 1),
4713 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4714 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4716 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4717 operation, do this as A + (X != 0). Similarly for other simple
4718 binary operators. */
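/* Likewise X ? A : A + 1 is done as A + (X == 0), by first inverting
the condition; see the invert_truthvalue calls below. */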
4719 if (singleton && binary_op
4720 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4721 && (TREE_CODE (binary_op) == PLUS_EXPR
4722 || TREE_CODE (binary_op) == MINUS_EXPR
4723 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4724 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4725 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4726 && integer_onep (TREE_OPERAND (binary_op, 1))
4727 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4729 rtx result;
4730 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4731 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4732 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4733 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4734 : and_optab);
4736 /* If we had X ? A : A + 1, do this as A + (X == 0).
4738 We have to invert the truth value here and then put it
4739 back later if do_store_flag fails. We cannot simply copy
4740 TREE_OPERAND (exp, 0) to another variable and modify that
4741 because invert_truthvalue can modify the tree pointed to
4742 by its argument. */
4743 if (singleton == TREE_OPERAND (exp, 1))
4744 TREE_OPERAND (exp, 0)
4745 = invert_truthvalue (TREE_OPERAND (exp, 0));
4747 result = do_store_flag (TREE_OPERAND (exp, 0),
4748 (safe_from_p (temp, singleton)
4749 ? temp : NULL_RTX),
4750 mode, BRANCH_COST <= 1);
4752 if (result)
4754 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4755 return expand_binop (mode, boptab, op1, result, temp,
4756 unsignedp, OPTAB_LIB_WIDEN);
4758 else if (singleton == TREE_OPERAND (exp, 1))
4759 TREE_OPERAND (exp, 0)
4760 = invert_truthvalue (TREE_OPERAND (exp, 0));
4763 NO_DEFER_POP;
4764 op0 = gen_label_rtx ();
4766 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4768 if (temp != 0)
4770 /* If the target conflicts with the other operand of the
4771 binary op, we can't use it. Also, we can't use the target
4772 if it is a hard register, because evaluating the condition
4773 might clobber it. */
4774 if ((binary_op
4775 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4776 || (GET_CODE (temp) == REG
4777 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4778 temp = gen_reg_rtx (mode);
4779 store_expr (singleton, temp, 0);
4781 else
4782 expand_expr (singleton,
4783 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4784 if (cleanups_this_call)
4786 sorry ("aggregate value in COND_EXPR");
4787 cleanups_this_call = 0;
4789 if (singleton == TREE_OPERAND (exp, 1))
4790 jumpif (TREE_OPERAND (exp, 0), op0);
4791 else
4792 jumpifnot (TREE_OPERAND (exp, 0), op0);
4794 if (binary_op && temp == 0)
4795 /* Just touch the other operand. */
4796 expand_expr (TREE_OPERAND (binary_op, 1),
4797 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4798 else if (binary_op)
4799 store_expr (build (TREE_CODE (binary_op), type,
4800 make_tree (type, temp),
4801 TREE_OPERAND (binary_op, 1)),
4802 temp, 0);
4803 else
4804 store_expr (build1 (TREE_CODE (unary_op), type,
4805 make_tree (type, temp)),
4806 temp, 0);
4807 op1 = op0;
4809 #if 0
4810 /* This is now done in jump.c and is better done there because it
4811 produces shorter register lifetimes. */
4813 /* Check for both possibilities, either constants or variables
4814 in registers (but not the same as the target!). If so, can
4815 save branches by assigning one, branching, and assigning the
4816 other. */
4817 else if (temp && GET_MODE (temp) != BLKmode
4818 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4819 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4820 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4821 && DECL_RTL (TREE_OPERAND (exp, 1))
4822 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4823 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4824 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4825 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4826 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4827 && DECL_RTL (TREE_OPERAND (exp, 2))
4828 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4829 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4831 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4832 temp = gen_reg_rtx (mode);
4833 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4834 jumpifnot (TREE_OPERAND (exp, 0), op0);
4835 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4836 op1 = op0;
4838 #endif
4839 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4840 comparison operator. If we have one of these cases, set the
4841 output to A, branch on A (cse will merge these two references),
4842 then set the output to FOO. */
4843 else if (temp
4844 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4845 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4846 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4847 TREE_OPERAND (exp, 1), 0)
4848 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4849 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4851 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4852 temp = gen_reg_rtx (mode);
4853 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4854 jumpif (TREE_OPERAND (exp, 0), op0);
4855 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4856 op1 = op0;
4858 else if (temp
4859 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4860 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4861 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4862 TREE_OPERAND (exp, 2), 0)
4863 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4864 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4866 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4867 temp = gen_reg_rtx (mode);
4868 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4869 jumpifnot (TREE_OPERAND (exp, 0), op0);
4870 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4871 op1 = op0;
4873 else
4875 op1 = gen_label_rtx ();
4876 jumpifnot (TREE_OPERAND (exp, 0), op0);
4877 if (temp != 0)
4878 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4879 else
4880 expand_expr (TREE_OPERAND (exp, 1),
4881 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4882 if (cleanups_this_call)
4884 sorry ("aggregate value in COND_EXPR");
4885 cleanups_this_call = 0;
4888 emit_queue ();
4889 emit_jump_insn (gen_jump (op1));
4890 emit_barrier ();
4891 emit_label (op0);
4892 if (temp != 0)
4893 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4894 else
4895 expand_expr (TREE_OPERAND (exp, 2),
4896 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4899 if (cleanups_this_call)
4901 sorry ("aggregate value in COND_EXPR");
4902 cleanups_this_call = 0;
4905 emit_queue ();
4906 emit_label (op1);
4907 OK_DEFER_POP;
4908 cleanups_this_call = old_cleanups;
4909 return temp;
4912 case TARGET_EXPR:
4914 /* Something needs to be initialized, but we didn't know
4915 where that thing was when building the tree. For example,
4916 it could be the return value of a function, or a parameter
4917 to a function which is laid down on the stack, or a temporary
4918 variable which must be passed by reference.
4920 We guarantee that the expression will either be constructed
4921 or copied into our original target. */
4923 tree slot = TREE_OPERAND (exp, 0);
4924 tree exp1;
4926 if (TREE_CODE (slot) != VAR_DECL)
4927 abort ();
4929 if (target == 0)
4931 if (DECL_RTL (slot) != 0)
4933 target = DECL_RTL (slot);
4934 /* If we have already expanded the slot, don't do
4935 it again. (mrs) */
4936 if (TREE_OPERAND (exp, 1) == NULL_TREE)
4937 return target;
4939 else
4941 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4942 /* All temp slots at this level must not conflict. */
4943 preserve_temp_slots (target);
4944 DECL_RTL (slot) = target;
4947 #if 0
4948 /* I bet this needs to be done, and I bet that it needs to
4949 be above, inside the else clause. The reason is
4950 simple: how else is it going to get cleaned up? (mrs)
4952 The reason it probably did not work before, and was
4953 commented out, is that this was re-expanding already
4954 expanded target_exprs (target == 0 and DECL_RTL (slot)
4955 != 0), also cleaning them up many times as well. :-( */
4957 /* Since SLOT is not known to the called function
4958 to belong to its stack frame, we must build an explicit
4959 cleanup. This case occurs when we must build up a reference
4960 to pass the reference as an argument. In this case,
4961 it is very likely that such a reference need not be
4962 built here. */
4964 if (TREE_OPERAND (exp, 2) == 0)
4965 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
4966 if (TREE_OPERAND (exp, 2))
4967 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
4968 cleanups_this_call);
4969 #endif
4971 else
4973 /* This case does occur when expanding a parameter which
4974 needs to be constructed on the stack. The target
4975 is the actual stack address that we want to initialize.
4976 The function we call will perform the cleanup in this case. */
4978 DECL_RTL (slot) = target;
4981 exp1 = TREE_OPERAND (exp, 1);
4982 /* Mark it as expanded. */
4983 TREE_OPERAND (exp, 1) = NULL_TREE;
4985 return expand_expr (exp1, target, tmode, modifier);
4988 case INIT_EXPR:
4990 tree lhs = TREE_OPERAND (exp, 0);
4991 tree rhs = TREE_OPERAND (exp, 1);
4992 tree noncopied_parts = 0;
4993 tree lhs_type = TREE_TYPE (lhs);
4995 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
4996 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
4997 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
4998 TYPE_NONCOPIED_PARTS (lhs_type));
4999 while (noncopied_parts != 0)
5001 expand_assignment (TREE_VALUE (noncopied_parts),
5002 TREE_PURPOSE (noncopied_parts), 0, 0);
5003 noncopied_parts = TREE_CHAIN (noncopied_parts);
5005 return temp;
5008 case MODIFY_EXPR:
5010 /* If lhs is complex, expand calls in rhs before computing it.
5011 That's so we don't compute a pointer and save it over a call.
5012 If lhs is simple, compute it first so we can give it as a
5013 target if the rhs is just a call. This avoids an extra temp and copy
5014 and that prevents a partial-subsumption which makes bad code.
5015 Actually we could treat component_ref's of vars like vars. */
5017 tree lhs = TREE_OPERAND (exp, 0);
5018 tree rhs = TREE_OPERAND (exp, 1);
5019 tree noncopied_parts = 0;
5020 tree lhs_type = TREE_TYPE (lhs);
5022 temp = 0;
5024 if (TREE_CODE (lhs) != VAR_DECL
5025 && TREE_CODE (lhs) != RESULT_DECL
5026 && TREE_CODE (lhs) != PARM_DECL)
5027 preexpand_calls (exp);
5029 /* Check for |= or &= of a bitfield of size one into another bitfield
5030 of size 1. In this case, (unless we need the result of the
5031 assignment) we can do this more efficiently with a
5032 test followed by an assignment, if necessary.
5034 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5035 things change so we do, this code should be enhanced to
5036 support it. */
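/* E.g., with S.X and T.Y both one-bit fields and the result unused,
S.X |= T.Y is done as `if (T.Y) S.X = 1;' and S.X &= T.Y as
`if (! T.Y) S.X = 0;', instead of extracting and reinserting bits. */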
5037 if (ignore
5038 && TREE_CODE (lhs) == COMPONENT_REF
5039 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5040 || TREE_CODE (rhs) == BIT_AND_EXPR)
5041 && TREE_OPERAND (rhs, 0) == lhs
5042 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5043 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5044 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5046 rtx label = gen_label_rtx ();
5048 do_jump (TREE_OPERAND (rhs, 1),
5049 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5050 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5051 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5052 (TREE_CODE (rhs) == BIT_IOR_EXPR
5053 ? integer_one_node
5054 : integer_zero_node)),
5055 0, 0);
5056 do_pending_stack_adjust ();
5057 emit_label (label);
5058 return const0_rtx;
5061 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5062 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5063 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5064 TYPE_NONCOPIED_PARTS (lhs_type));
5066 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5067 while (noncopied_parts != 0)
5069 expand_assignment (TREE_PURPOSE (noncopied_parts),
5070 TREE_VALUE (noncopied_parts), 0, 0);
5071 noncopied_parts = TREE_CHAIN (noncopied_parts);
5073 return temp;
5076 case PREINCREMENT_EXPR:
5077 case PREDECREMENT_EXPR:
5078 return expand_increment (exp, 0);
5080 case POSTINCREMENT_EXPR:
5081 case POSTDECREMENT_EXPR:
5082 /* Faster to treat as pre-increment if result is not used. */
5083 return expand_increment (exp, ! ignore);
5085 case ADDR_EXPR:
5086 /* Are we taking the address of a nested function? */
5087 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5088 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5090 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5091 op0 = force_operand (op0, target);
5093 else
5095 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5096 (modifier == EXPAND_INITIALIZER
5097 ? modifier : EXPAND_CONST_ADDRESS));
5098 if (GET_CODE (op0) != MEM)
5099 abort ();
5101 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5102 return XEXP (op0, 0);
5103 op0 = force_operand (XEXP (op0, 0), target);
5105 if (flag_force_addr && GET_CODE (op0) != REG)
5106 return force_reg (Pmode, op0);
5107 return op0;
5109 case ENTRY_VALUE_EXPR:
5110 abort ();
5112 /* COMPLEX type for Extended Pascal & Fortran */
5113 case COMPLEX_EXPR:
5115 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5117 rtx prev;
5119 /* Get the rtx code of the operands. */
5120 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5121 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5123 if (! target)
5124 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5126 prev = get_last_insn ();
5128 /* Tell flow that the whole of the destination is being set. */
5129 if (GET_CODE (target) == REG)
5130 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5132 /* Move the real (op0) and imaginary (op1) parts to their location. */
5133 emit_move_insn (gen_lowpart (mode, target), op0);
5134 emit_move_insn (gen_highpart (mode, target), op1);
5136 /* Complex construction should appear as a single unit. */
5137 group_insns (prev);
5139 return target;
5142 case REALPART_EXPR:
5144 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5145 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5146 if (! target)
5147 target = gen_reg_rtx (mode);
5148 emit_move_insn (target, gen_lowpart (mode, op0));
5149 return target;
5152 case IMAGPART_EXPR:
5154 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5155 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5156 if (! target)
5157 target = gen_reg_rtx (mode);
5158 emit_move_insn (target, gen_highpart (mode, op0));
5159 return target;
5162 case CONJ_EXPR:
5164 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5165 rtx imag_t;
5166 rtx prev;
5168 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5170 if (! target)
5171 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5173 prev = get_last_insn ();
5175 /* Tell flow that the whole of the destination is being set. */
5176 if (GET_CODE (target) == REG)
5177 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5179 /* Store the realpart and the negated imagpart to target. */
5180 emit_move_insn (gen_lowpart (mode, target), gen_lowpart (mode, op0));
5182 imag_t = gen_highpart (mode, target);
5183 temp = expand_unop (mode, neg_optab,
5184 gen_highpart (mode, op0), imag_t, 0);
5185 if (temp != imag_t)
5186 emit_move_insn (imag_t, temp);
5188 /* Conjugate should appear as a single unit. */
5189 group_insns (prev);
5191 return target;
5194 case ERROR_MARK:
5195 return const0_rtx;
5197 default:
5198 return (*lang_expand_expr) (exp, target, tmode, modifier);
5201 /* Here to do an ordinary binary operator, generating an instruction
5202 from the optab already placed in `this_optab'. */
5203 binop:
5204 preexpand_calls (exp);
5205 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5206 subtarget = 0;
5207 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5208 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5209 binop2:
5210 temp = expand_binop (mode, this_optab, op0, op1, target,
5211 unsignedp, OPTAB_LIB_WIDEN);
5212 if (temp == 0)
5213 abort ();
5214 return temp;
5217 /* Return the alignment in bits of EXP, a pointer valued expression.
5218 But don't return more than MAX_ALIGN no matter what.
5219 The alignment returned is, by default, the alignment of the thing that
5220 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5222 Otherwise, look at the expression to see if we can do better, i.e., if the
5223 expression is actually pointing at an object whose alignment is tighter. */
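/* E.g., the value of `(char *) &D + 2', where D is doubleword-aligned,
is known to be 16-bit aligned even though its type promises only
byte alignment. */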
5225 static int
5226 get_pointer_alignment (exp, max_align)
5227 tree exp;
5228 unsigned max_align;
5230 unsigned align, inner;
5232 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5233 return 0;
5235 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5236 align = MIN (align, max_align);
5238 while (1)
5240 switch (TREE_CODE (exp))
5242 case NOP_EXPR:
5243 case CONVERT_EXPR:
5244 case NON_LVALUE_EXPR:
5245 exp = TREE_OPERAND (exp, 0);
5246 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5247 return align;
5248 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5249 inner = MIN (inner, max_align);
5250 align = MAX (align, inner);
5251 break;
5253 case PLUS_EXPR:
5254 /* If sum of pointer + int, restrict our maximum alignment to that
5255 imposed by the integer. If not, we can't do any better than
5256 ALIGN. */
5257 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5258 return align;
5260 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5261 & (max_align - 1))
5262 != 0)
5263 max_align >>= 1;
5265 exp = TREE_OPERAND (exp, 0);
5266 break;
5268 case ADDR_EXPR:
5269 /* See what we are pointing at and look at its alignment. */
5270 exp = TREE_OPERAND (exp, 0);
5271 if (TREE_CODE (exp) == FUNCTION_DECL)
5272 align = MAX (align, FUNCTION_BOUNDARY);
5273 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5274 align = MAX (align, DECL_ALIGN (exp));
5275 #ifdef CONSTANT_ALIGNMENT
5276 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5277 align = CONSTANT_ALIGNMENT (exp, align);
5278 #endif
5279 return MIN (align, max_align);
5281 default:
5282 return align;
5287 /* Return the tree node and offset if a given argument corresponds to
5288 a string constant. */
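/* E.g., for the argument `"hello" + 2' this returns the STRING_CST
"hello" and sets *PTR_OFFSET to 2; for anything else it returns 0. */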
5290 static tree
5291 string_constant (arg, ptr_offset)
5292 tree arg;
5293 tree *ptr_offset;
5295 STRIP_NOPS (arg);
5297 if (TREE_CODE (arg) == ADDR_EXPR
5298 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5300 *ptr_offset = integer_zero_node;
5301 return TREE_OPERAND (arg, 0);
5303 else if (TREE_CODE (arg) == PLUS_EXPR)
5305 tree arg0 = TREE_OPERAND (arg, 0);
5306 tree arg1 = TREE_OPERAND (arg, 1);
5308 STRIP_NOPS (arg0);
5309 STRIP_NOPS (arg1);
5311 if (TREE_CODE (arg0) == ADDR_EXPR
5312 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5314 *ptr_offset = arg1;
5315 return TREE_OPERAND (arg0, 0);
5317 else if (TREE_CODE (arg1) == ADDR_EXPR
5318 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5320 *ptr_offset = arg0;
5321 return TREE_OPERAND (arg1, 0);
5325 return 0;
5328 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5329 way, because the string proper could contain a zero byte in the middle.
5330 TREE_STRING_LENGTH is the size of the character array, not the string.
5332 Unfortunately, string_constant can't access the values of const char
5333 arrays with initializers, so neither can we do so here. */
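/* E.g., c_strlen of `"hello" + 2' is 3, while c_strlen of "foo\0bar"
with a nonconstant offset is 0 (meaning unknown), since the position
of the relevant terminating null cannot be determined. */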
5335 static tree
5336 c_strlen (src)
5337 tree src;
5339 tree offset_node;
5340 int offset, max;
5341 char *ptr;
5343 src = string_constant (src, &offset_node);
5344 if (src == 0)
5345 return 0;
5346 max = TREE_STRING_LENGTH (src);
5347 ptr = TREE_STRING_POINTER (src);
5348 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5350 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5351 compute the offset to the following null if we don't know where to
5352 start searching for it. */
5353 int i;
5354 for (i = 0; i < max; i++)
5355 if (ptr[i] == 0)
5356 return 0;
5357 /* We don't know the starting offset, but we do know that the string
5358 has no internal zero bytes. We can assume that the offset falls
5359 within the bounds of the string; otherwise, the programmer deserves
5360 what he gets. Subtract the offset from the length of the string,
5361 and return that. */
5362 /* This would perhaps not be valid if we were dealing with named
5363 arrays in addition to literal string constants. */
5364 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5367 /* We have a known offset into the string. Start searching there for
5368 a null character. */
5369 if (offset_node == 0)
5370 offset = 0;
5371 else
5373 /* Did we get a long long offset? If so, punt. */
5374 if (TREE_INT_CST_HIGH (offset_node) != 0)
5375 return 0;
5376 offset = TREE_INT_CST_LOW (offset_node);
5378 /* If the offset is known to be out of bounds, warn, and call strlen at
5379 runtime. */
5380 if (offset < 0 || offset > max)
5382 warning ("offset outside bounds of constant string");
5383 return 0;
5385 /* Use strlen to search for the first zero byte. Since any strings
5386 constructed with build_string will have nulls appended, we win even
5387 if we get handed something like (char[4])"abcd".
5389 Since OFFSET is our starting index into the string, no further
5390 calculation is needed. */
5391 return size_int (strlen (ptr + offset));
5394 /* Expand an expression EXP that calls a built-in function,
5395 with result going to TARGET if that's convenient
5396 (and in mode MODE if that's convenient).
5397 SUBTARGET may be used as the target for computing one of EXP's operands.
5398 IGNORE is nonzero if the value is to be ignored. */
5400 static rtx
5401 expand_builtin (exp, target, subtarget, mode, ignore)
5402 tree exp;
5403 rtx target;
5404 rtx subtarget;
5405 enum machine_mode mode;
5406 int ignore;
5408 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5409 tree arglist = TREE_OPERAND (exp, 1);
5410 rtx op0;
5411 rtx lab1, insns;
5412 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5413 optab builtin_optab;
5415 switch (DECL_FUNCTION_CODE (fndecl))
5417 case BUILT_IN_ABS:
5418 case BUILT_IN_LABS:
5419 case BUILT_IN_FABS:
5420 /* build_function_call changes these into ABS_EXPR. */
5421 abort ();
5423 case BUILT_IN_SIN:
5424 case BUILT_IN_COS:
5425 case BUILT_IN_FSQRT:
5426 /* If not optimizing, call the library function. */
5427 if (! optimize)
5428 break;
5430 if (arglist == 0
5431 /* Arg could be wrong type if user redeclared this fcn wrong. */
5432 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5433 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5435 /* Stabilize and compute the argument. */
5436 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5437 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5439 exp = copy_node (exp);
5440 arglist = copy_node (arglist);
5441 TREE_OPERAND (exp, 1) = arglist;
5442 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5444 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5446 /* Make a suitable register to place result in. */
5447 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5449 emit_queue ();
5450 start_sequence ();
5452 switch (DECL_FUNCTION_CODE (fndecl))
5454 case BUILT_IN_SIN:
5455 builtin_optab = sin_optab; break;
5456 case BUILT_IN_COS:
5457 builtin_optab = cos_optab; break;
5458 case BUILT_IN_FSQRT:
5459 builtin_optab = sqrt_optab; break;
5460 default:
5461 abort ();
5464 /* Compute into TARGET.
5465 Set TARGET to wherever the result comes back. */
5466 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5467 builtin_optab, op0, target, 0);
5469 /* If we were unable to expand via the builtin, stop the
5470 sequence (without outputting the insns) and break, causing
5471 a call to the library function. */
5472 if (target == 0)
5474 end_sequence ();
5475 break;
5478 /* Check the results by default. But if flag_fast_math is turned on,
5479 then assume sqrt will always be called with valid arguments. */
5481 if (! flag_fast_math)
5483 /* Don't define the builtin FP instructions
5484 if your machine is not IEEE. */
5485 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5486 abort ();
5488 lab1 = gen_label_rtx ();
5490 /* Test the result; if it is NaN, set errno=EDOM because
5491 the argument was not in the domain. */
5492 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5493 emit_jump_insn (gen_beq (lab1));
5495 #if TARGET_EDOM
5497 #ifdef GEN_ERRNO_RTX
5498 rtx errno_rtx = GEN_ERRNO_RTX;
5499 #else
5500 rtx errno_rtx
5501 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5502 #endif
5504 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5506 #else
5507 /* We can't set errno=EDOM directly; let the library call do it.
5508 Pop the arguments right away in case the call gets deleted. */
5509 NO_DEFER_POP;
5510 expand_call (exp, target, 0);
5511 OK_DEFER_POP;
5512 #endif
5514 emit_label (lab1);
5517 /* Output the entire sequence. */
5518 insns = get_insns ();
5519 end_sequence ();
5520 emit_insns (insns);
5522 return target;
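/* The emitted test amounts to this source-level sketch (illustrative
   only; comparing a value against itself fails exactly for NaN):  */
#if 0
#include <errno.h>
#include <math.h>

static double
checked_sqrt (double x)
{
  double r = sqrt (x);        /* the open-coded sqrt insn */
  if (r != r)                 /* NaN: argument outside the domain */
    errno = EDOM;             /* or redo it as a library call */
  return r;
}
#endif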
5524 case BUILT_IN_SAVEREGS:
5525 /* Don't do __builtin_saveregs more than once in a function.
5526 Save the result of the first call and reuse it. */
5527 if (saveregs_value != 0)
5528 return saveregs_value;
5530 /* When this function is called, it means that registers must be
5531 saved on entry to this function. So we migrate the
5532 call to the first insn of this function. */
5533 rtx temp;
5534 rtx seq;
5535 rtx valreg, saved_valreg;
5537 /* Now really call the function. `expand_call' does not call
5538 expand_builtin, so there is no danger of infinite recursion here. */
5539 start_sequence ();
5541 #ifdef EXPAND_BUILTIN_SAVEREGS
5542 /* Do whatever the machine needs done in this case. */
5543 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5544 #else
5545 /* The register where the function returns its value
5546 is likely to have something else in it, such as an argument.
5547 So preserve that register around the call. */
5548 if (value_mode != VOIDmode)
5550 valreg = hard_libcall_value (value_mode);
5551 saved_valreg = gen_reg_rtx (value_mode);
5552 emit_move_insn (saved_valreg, valreg);
5555 /* Generate the call, putting the value in a pseudo. */
5556 temp = expand_call (exp, target, ignore);
5558 if (value_mode != VOIDmode)
5559 emit_move_insn (valreg, saved_valreg);
5560 #endif
5562 seq = get_insns ();
5563 end_sequence ();
5565 saveregs_value = temp;
5567 /* This won't work inside a SEQUENCE--it really has to be
5568 at the start of the function. */
5569 if (in_sequence_p ())
5571 /* Better to do this than to crash. */
5572 error ("`va_start' used within `({...})'");
5573 return temp;
5576 /* Put the sequence after the NOTE that starts the function. */
5577 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5578 return temp;
5581 /* __builtin_args_info (N) returns word N of the arg space info
5582 for the current function. The number and meanings of words
5583 is controlled by the definition of CUMULATIVE_ARGS. */
5584 case BUILT_IN_ARGS_INFO:
5586 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5587 int i;
5588 int *word_ptr = (int *) &current_function_args_info;
5589 tree type, elts, result;
5591 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5592 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5593 __FILE__, __LINE__);
5595 if (arglist != 0)
5597 tree arg = TREE_VALUE (arglist);
5598 if (TREE_CODE (arg) != INTEGER_CST)
5599 error ("argument of __builtin_args_info must be constant");
5600 else
5602 int wordnum = TREE_INT_CST_LOW (arg);
5604 if (wordnum < 0 || wordnum >= nwords)
5605 error ("argument of __builtin_args_info out of range");
5606 else
5607 return GEN_INT (word_ptr[wordnum]);
5610 else
5611 error ("missing argument in __builtin_args_info");
5613 return const0_rtx;
5615 #if 0
5616 for (i = 0; i < nwords; i++)
5617 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5619 type = build_array_type (integer_type_node,
5620 build_index_type (build_int_2 (nwords, 0)));
5621 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5622 TREE_CONSTANT (result) = 1;
5623 TREE_STATIC (result) = 1;
5624 result = build (INDIRECT_REF, build_pointer_type (type), result);
5625 TREE_CONSTANT (result) = 1;
5626 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5627 #endif
5630 /* Return the address of the first anonymous stack arg. */
5631 case BUILT_IN_NEXT_ARG:
5633 tree fntype = TREE_TYPE (current_function_decl);
5634 if (!(TYPE_ARG_TYPES (fntype) != 0
5635 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5636 != void_type_node)))
5638 error ("`va_start' used in function with fixed args");
5639 return const0_rtx;
5643 return expand_binop (Pmode, add_optab,
5644 current_function_internal_arg_pointer,
5645 current_function_arg_offset_rtx,
5646 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5648 case BUILT_IN_CLASSIFY_TYPE:
5649 if (arglist != 0)
5651 tree type = TREE_TYPE (TREE_VALUE (arglist));
5652 enum tree_code code = TREE_CODE (type);
5653 if (code == VOID_TYPE)
5654 return GEN_INT (void_type_class);
5655 if (code == INTEGER_TYPE)
5656 return GEN_INT (integer_type_class);
5657 if (code == CHAR_TYPE)
5658 return GEN_INT (char_type_class);
5659 if (code == ENUMERAL_TYPE)
5660 return GEN_INT (enumeral_type_class);
5661 if (code == BOOLEAN_TYPE)
5662 return GEN_INT (boolean_type_class);
5663 if (code == POINTER_TYPE)
5664 return GEN_INT (pointer_type_class);
5665 if (code == REFERENCE_TYPE)
5666 return GEN_INT (reference_type_class);
5667 if (code == OFFSET_TYPE)
5668 return GEN_INT (offset_type_class);
5669 if (code == REAL_TYPE)
5670 return GEN_INT (real_type_class);
5671 if (code == COMPLEX_TYPE)
5672 return GEN_INT (complex_type_class);
5673 if (code == FUNCTION_TYPE)
5674 return GEN_INT (function_type_class);
5675 if (code == METHOD_TYPE)
5676 return GEN_INT (method_type_class);
5677 if (code == RECORD_TYPE)
5678 return GEN_INT (record_type_class);
5679 if (code == UNION_TYPE)
5680 return GEN_INT (union_type_class);
5681 if (code == ARRAY_TYPE)
5682 return GEN_INT (array_type_class);
5683 if (code == STRING_TYPE)
5684 return GEN_INT (string_type_class);
5685 if (code == SET_TYPE)
5686 return GEN_INT (set_type_class);
5687 if (code == FILE_TYPE)
5688 return GEN_INT (file_type_class);
5689 if (code == LANG_TYPE)
5690 return GEN_INT (lang_type_class);
5692 return GEN_INT (no_type_class);
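/* Usage sketch, illustrative only; the *_type_class values are
   defined in typeclass.h.  */
#if 0
static void
classify_example (void)
{
  int ic = __builtin_classify_type (1);     /* integer_type_class */
  int rc = __builtin_classify_type (1.0);   /* real_type_class */
}
#endif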
5694 case BUILT_IN_CONSTANT_P:
5695 if (arglist == 0)
5696 return const0_rtx;
5697 else
5698 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5699 ? const1_rtx : const0_rtx);
5701 case BUILT_IN_FRAME_ADDRESS:
5702 /* The argument must be a nonnegative integer constant.
5703 It counts the number of frames to scan up the stack.
5704 The value is the address of that frame. */
5705 case BUILT_IN_RETURN_ADDRESS:
5706 /* The argument must be a nonnegative integer constant.
5707 It counts the number of frames to scan up the stack.
5708 The value is the return address saved in that frame. */
5709 if (arglist == 0)
5710 /* Warning about missing arg was already issued. */
5711 return const0_rtx;
5712 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5714 error ("invalid arg to __builtin_return_address");
5715 return const0_rtx;
5717 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5719 error ("invalid arg to __builtin_return_address");
5720 return const0_rtx;
5722 else
5724 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5725 rtx tem = frame_pointer_rtx;
5726 int i;
5728 /* Scan back COUNT frames to the specified frame. */
5729 for (i = 0; i < count; i++)
5731 /* Assume the dynamic chain pointer is in the word that
5732 the frame address points to, unless otherwise specified. */
5733 #ifdef DYNAMIC_CHAIN_ADDRESS
5734 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5735 #endif
5736 tem = memory_address (Pmode, tem);
5737 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5740 /* For __builtin_frame_address, return what we've got. */
5741 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5742 return tem;
5744 /* For __builtin_return_address,
5745 get the return address from that frame. */
5746 #ifdef RETURN_ADDR_RTX
5747 return RETURN_ADDR_RTX (count, tem);
5748 #else
5749 tem = memory_address (Pmode,
5750 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5751 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5752 #endif
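/* The loop above amounts to this sketch, assuming the dynamic chain
   pointer really is the word the frame address points to
   (illustrative only, not compiler code):  */
#if 0
static void *
nth_frame (void *fp, int count)
{
  int i;
  for (i = 0; i < count; i++)
    fp = *(void **) fp;       /* follow the saved frame pointer */
  return fp;                  /* __builtin_frame_address (count) */
}
#endif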
5755 case BUILT_IN_ALLOCA:
5756 if (arglist == 0
5757 /* Arg could be non-integer if user redeclared this fcn wrong. */
5758 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5759 return const0_rtx;
5760 current_function_calls_alloca = 1;
5761 /* Compute the argument. */
5762 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
5764 /* Allocate the desired space. */
5765 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5767 /* Record the new stack level for nonlocal gotos. */
5768 if (nonlocal_goto_handler_slot != 0)
5769 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
5770 return target;
5772 case BUILT_IN_FFS:
5773 /* If not optimizing, call the library function. */
5774 if (!optimize)
5775 break;
5777 if (arglist == 0
5778 /* Arg could be non-integer if user redeclared this fcn wrong. */
5779 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5780 return const0_rtx;
5782 /* Compute the argument. */
5783 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5784 /* Compute ffs, into TARGET if possible.
5785 Set TARGET to wherever the result comes back. */
5786 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5787 ffs_optab, op0, target, 1);
5788 if (target == 0)
5789 abort ();
5790 return target;
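/* The ffs optab implements the usual library semantics, as in this
   reference sketch (illustrative only):  */
#if 0
static int
ffs_reference (int x)
{
  unsigned u = (unsigned) x;
  int pos;
  if (u == 0)
    return 0;                 /* ffs (0) is defined to be 0 */
  for (pos = 1; (u & 1) == 0; pos++)
    u >>= 1;                  /* 1-based index of lowest set bit */
  return pos;
}
#endif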
5792 case BUILT_IN_STRLEN:
5793 /* If not optimizing, call the library function. */
5794 if (!optimize)
5795 break;
5797 if (arglist == 0
5798 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5799 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5800 return const0_rtx;
5801 else
5803 tree src = TREE_VALUE (arglist);
5804 tree len = c_strlen (src);
5806 int align
5807 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5809 rtx result, src_rtx, char_rtx;
5810 enum machine_mode insn_mode = value_mode, char_mode;
5811 enum insn_code icode;
5813 /* If the length is known, just return it. */
5814 if (len != 0)
5815 return expand_expr (len, target, mode, 0);
5817 /* If SRC's alignment is unknown, don't do this operation inline. */
5818 if (align == 0)
5819 break;
5821 /* Call a function if we can't compute strlen in the right mode. */
5823 while (insn_mode != VOIDmode)
5825 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
5826 if (icode != CODE_FOR_nothing)
5827 break;
5829 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
5831 if (insn_mode == VOIDmode)
5832 break;
5834 /* Make a place to write the result of the instruction. */
5835 result = target;
5836 if (! (result != 0
5837 && GET_CODE (result) == REG
5838 && GET_MODE (result) == insn_mode
5839 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
5840 result = gen_reg_rtx (insn_mode);
5842 /* Make sure the operands are acceptable to the predicates. */
5844 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
5845 result = gen_reg_rtx (insn_mode);
5847 src_rtx = memory_address (BLKmode,
5848 expand_expr (src, NULL_RTX, Pmode,
5849 EXPAND_NORMAL));
5850 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
5851 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
5853 char_rtx = const0_rtx;
5854 char_mode = insn_operand_mode[(int)icode][2];
5855 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
5856 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
5858 emit_insn (GEN_FCN (icode) (result,
5859 gen_rtx (MEM, BLKmode, src_rtx),
5860 char_rtx, GEN_INT (align)));
5862 /* Return the value in the proper mode for this function. */
5863 if (GET_MODE (result) == value_mode)
5864 return result;
5865 else if (target != 0)
5867 convert_move (target, result, 0);
5868 return target;
5870 else
5871 return convert_to_mode (value_mode, result, 0);
5874 case BUILT_IN_STRCPY:
5875 /* If not optimizing, call the library function. */
5876 if (!optimize)
5877 break;
5879 if (arglist == 0
5880 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5881 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5882 || TREE_CHAIN (arglist) == 0
5883 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5884 return const0_rtx;
5885 else
5887 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
5889 if (len == 0)
5890 break;
5892 len = size_binop (PLUS_EXPR, len, integer_one_node);
5894 chainon (arglist, build_tree_list (NULL_TREE, len));
5897 /* Drops in. */
5898 case BUILT_IN_MEMCPY:
5899 /* If not optimizing, call the library function. */
5900 if (!optimize)
5901 break;
5903 if (arglist == 0
5904 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5905 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5906 || TREE_CHAIN (arglist) == 0
5907 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5908 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5909 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5910 return const0_rtx;
5911 else
5913 tree dest = TREE_VALUE (arglist);
5914 tree src = TREE_VALUE (TREE_CHAIN (arglist));
5915 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5917 int src_align
5918 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5919 int dest_align
5920 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
5921 rtx dest_rtx;
5923 /* If the alignment of either SRC or DEST is unknown, don't do
5924 this operation in-line. */
5925 if (src_align == 0 || dest_align == 0)
5927 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
5928 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
5929 break;
5932 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
5934 /* Copy word part most expediently. */
5935 emit_block_move (gen_rtx (MEM, BLKmode,
5936 memory_address (BLKmode, dest_rtx)),
5937 gen_rtx (MEM, BLKmode,
5938 memory_address (BLKmode,
5939 expand_expr (src, NULL_RTX,
5940 Pmode,
5941 EXPAND_NORMAL))),
5942 expand_expr (len, NULL_RTX, VOIDmode, 0),
5943 MIN (src_align, dest_align));
5944 return dest_rtx;
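/* For a constant source string the net effect is a source-level
   rewrite like this sketch (illustrative only):  */
#if 0
#include <string.h>

static void
strcpy_folding_example (char *dest)
{
  /* strcpy (dest, "abc") becomes a block move of
     strlen ("abc") + 1 == 4 bytes:  */
  memcpy (dest, "abc", 4);
}
#endif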
5947 /* These comparison functions need an instruction that returns an actual
5948 index. An ordinary compare that just sets the condition codes
5949 is not enough. */
5950 #ifdef HAVE_cmpstrsi
5951 case BUILT_IN_STRCMP:
5952 /* If not optimizing, call the library function. */
5953 if (!optimize)
5954 break;
5956 if (arglist == 0
5957 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5958 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5959 || TREE_CHAIN (arglist) == 0
5960 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
5961 return const0_rtx;
5962 else if (!HAVE_cmpstrsi)
5963 break;
5965 tree arg1 = TREE_VALUE (arglist);
5966 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
5967 tree offset;
5968 tree len, len2;
5970 len = c_strlen (arg1);
5971 if (len)
5972 len = size_binop (PLUS_EXPR, integer_one_node, len);
5973 len2 = c_strlen (arg2);
5974 if (len2)
5975 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
5977 /* If we don't have a constant length for the first, use the length
5978 of the second, if we know it. We don't require a constant for
5979 this case; some cost analysis could be done if both are available
5980 but neither is constant. For now, assume they're equally cheap.
5982 If both strings have constant lengths, use the smaller. This
5983 could arise if optimization results in strcmp being called with
5984 two fixed strings, or if the code was machine-generated. We should
5985 add some code to the `memcmp' handler below to deal with such
5986 situations, someday. */
5987 if (!len || TREE_CODE (len) != INTEGER_CST)
5989 if (len2)
5990 len = len2;
5991 else if (len == 0)
5992 break;
5994 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
5996 if (tree_int_cst_lt (len2, len))
5997 len = len2;
6000 chainon (arglist, build_tree_list (NULL_TREE, len));
6003 /* Drops in. */
6004 case BUILT_IN_MEMCMP:
6005 /* If not optimizing, call the library function. */
6006 if (!optimize)
6007 break;
6009 if (arglist == 0
6010 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6011 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6012 || TREE_CHAIN (arglist) == 0
6013 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6014 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6015 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6016 return const0_rtx;
6017 else if (!HAVE_cmpstrsi)
6018 break;
6020 tree arg1 = TREE_VALUE (arglist);
6021 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6022 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6023 rtx result;
6025 int arg1_align
6026 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6027 int arg2_align
6028 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6029 enum machine_mode insn_mode
6030 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6032 /* If the alignment of either pointer is unknown, call the function. */
6033 if (arg1_align == 0 || arg2_align == 0)
6035 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6036 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6037 break;
6040 /* Make a place to write the result of the instruction. */
6041 result = target;
6042 if (! (result != 0
6043 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6044 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6045 result = gen_reg_rtx (insn_mode);
6047 emit_insn (gen_cmpstrsi (result,
6048 gen_rtx (MEM, BLKmode,
6049 expand_expr (arg1, NULL_RTX, Pmode,
6050 EXPAND_NORMAL)),
6051 gen_rtx (MEM, BLKmode,
6052 expand_expr (arg2, NULL_RTX, Pmode,
6053 EXPAND_NORMAL)),
6054 expand_expr (len, NULL_RTX, VOIDmode, 0),
6055 GEN_INT (MIN (arg1_align, arg2_align))));
6057 /* Return the value in the proper mode for this function. */
6058 mode = TYPE_MODE (TREE_TYPE (exp));
6059 if (GET_MODE (result) == mode)
6060 return result;
6061 else if (target != 0)
6063 convert_move (target, result, 0);
6064 return target;
6066 else
6067 return convert_to_mode (mode, result, 0);
6069 #else
6070 case BUILT_IN_STRCMP:
6071 case BUILT_IN_MEMCMP:
6072 break;
6073 #endif
6075 default: /* just do library call, if unknown builtin */
6076 error ("built-in function %s not currently supported",
6077 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6080 /* The switch statement above can drop through to cause the function
6081 to be called normally. */
6083 return expand_call (exp, target, ignore);
6086 /* Expand code for a post- or pre- increment or decrement
6087 and return the RTX for the result.
6088 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6090 static rtx
6091 expand_increment (exp, post)
6092 register tree exp;
6093 int post;
6095 register rtx op0, op1;
6096 register rtx temp, value;
6097 register tree incremented = TREE_OPERAND (exp, 0);
6098 optab this_optab = add_optab;
6099 int icode;
6100 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6101 int op0_is_copy = 0;
6103 /* Stabilize any component ref that might need to be
6104 evaluated more than once below. */
6105 if (TREE_CODE (incremented) == BIT_FIELD_REF
6106 || (TREE_CODE (incremented) == COMPONENT_REF
6107 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6108 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6109 incremented = stabilize_reference (incremented);
6111 /* Compute the operands as RTX.
6112 Note whether OP0 is the actual lvalue or a copy of it:
6113 I believe it is a copy iff it is a register or subreg
6114 and insns were generated in computing it. */
6116 temp = get_last_insn ();
6117 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6119 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6120 in place but instead must do sign- or zero-extension during assignment,
6121 so we copy it into a new register and let the code below use it as
6122 a copy.
6124 Note that we can safely modify this SUBREG since it is known not to be
6125 shared (it was made by the expand_expr call above). */
6127 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6128 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6130 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6131 && temp != get_last_insn ());
6132 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6134 /* Decide whether incrementing or decrementing. */
6135 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6136 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6137 this_optab = sub_optab;
6139 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6140 then we cannot just increment OP0. We must
6141 therefore contrive to increment the original value.
6142 Then we can return OP0 since it is a copy of the old value. */
6143 if (op0_is_copy)
6145 /* This is the easiest way to increment the value wherever it is.
6146 Problems with multiple evaluation of INCREMENTED
6147 are prevented because either (1) it is a component_ref,
6148 in which case it was stabilized above, or (2) it is an array_ref
6149 with constant index in an array in a register, which is
6150 safe to reevaluate. */
6151 tree newexp = build ((this_optab == add_optab
6152 ? PLUS_EXPR : MINUS_EXPR),
6153 TREE_TYPE (exp),
6154 incremented,
6155 TREE_OPERAND (exp, 1));
6156 temp = expand_assignment (incremented, newexp, ! post, 0);
6157 return post ? op0 : temp;
6160 /* Convert decrement by a constant into a negative increment. */
6161 if (this_optab == sub_optab
6162 && GET_CODE (op1) == CONST_INT)
6164 op1 = GEN_INT (- INTVAL (op1));
6165 this_optab = add_optab;
6168 if (post)
6170 /* We have a true reference to the value in OP0.
6171 If there is an insn to add or subtract in this mode, queue it. */
6173 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
6174 op0 = stabilize (op0);
6175 #endif
6177 icode = (int) this_optab->handlers[(int) mode].insn_code;
6178 if (icode != (int) CODE_FOR_nothing
6179 /* Make sure that OP0 is valid for operands 0 and 1
6180 of the insn we want to queue. */
6181 && (*insn_operand_predicate[icode][0]) (op0, mode)
6182 && (*insn_operand_predicate[icode][1]) (op0, mode))
6184 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6185 op1 = force_reg (mode, op1);
6187 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6191 /* Preincrement, or we can't increment with one simple insn. */
6192 if (post)
6193 /* Save a copy of the value before inc or dec, to return it later. */
6194 temp = value = copy_to_reg (op0);
6195 else
6196 /* Arrange to return the incremented value. */
6197 /* Copy the rtx because expand_binop will protect from the queue,
6198 and the results of that would be invalid for us to return
6199 if our caller does emit_queue before using our result. */
6200 temp = copy_rtx (value = op0);
6202 /* Increment however we can. */
6203 op1 = expand_binop (mode, this_optab, value, op1, op0,
6204 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6205 /* Make sure the value is stored into OP0. */
6206 if (op1 != op0)
6207 emit_move_insn (op0, op1);
6209 return temp;
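/* The post/pre distinction above amounts to this sketch
   (illustrative only):  */
#if 0
static int
post_increment (int *p, int by)
{
  int old = *p;               /* copy saved before the change */
  *p = old + by;
  return old;                 /* postincrement yields the old value */
}

static int
pre_increment (int *p, int by)
{
  *p = *p + by;
  return *p;                  /* preincrement yields the new value */
}
#endif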
6212 /* Expand all function calls contained within EXP, innermost ones first.
6213 But don't look within expressions that have sequence points.
6214 For each CALL_EXPR, record the rtx for its value
6215 in the CALL_EXPR_RTL field. */
6217 static void
6218 preexpand_calls (exp)
6219 tree exp;
6221 register int nops, i;
6222 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6224 if (! do_preexpand_calls)
6225 return;
6227 /* Only expressions and references can contain calls. */
6229 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6230 return;
6232 switch (TREE_CODE (exp))
6234 case CALL_EXPR:
6235 /* Do nothing if already expanded. */
6236 if (CALL_EXPR_RTL (exp) != 0)
6237 return;
6239 /* Do nothing to built-in functions. */
6240 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6241 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6242 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6243 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6244 return;
6246 case COMPOUND_EXPR:
6247 case COND_EXPR:
6248 case TRUTH_ANDIF_EXPR:
6249 case TRUTH_ORIF_EXPR:
6250 /* If we find one of these, then we can be sure
6251 the adjust will be done for it (since it makes jumps).
6252 Do it now, so that if this is inside an argument
6253 of a function, we don't get the stack adjustment
6254 after some other args have already been pushed. */
6255 do_pending_stack_adjust ();
6256 return;
6258 case BLOCK:
6259 case RTL_EXPR:
6260 case WITH_CLEANUP_EXPR:
6261 return;
6263 case SAVE_EXPR:
6264 if (SAVE_EXPR_RTL (exp) != 0)
6265 return;
6268 nops = tree_code_length[(int) TREE_CODE (exp)];
6269 for (i = 0; i < nops; i++)
6270 if (TREE_OPERAND (exp, i) != 0)
6272 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6273 if (type == 'e' || type == '<' || type == '1' || type == '2'
6274 || type == 'r')
6275 preexpand_calls (TREE_OPERAND (exp, i));
6279 /* At the start of a function, record that we have no previously-pushed
6280 arguments waiting to be popped. */
6282 void
6283 init_pending_stack_adjust ()
6285 pending_stack_adjust = 0;
6288 /* When exiting from function, if safe, clear out any pending stack adjust
6289 so the adjustment won't get done. */
6291 void
6292 clear_pending_stack_adjust ()
6294 #ifdef EXIT_IGNORE_STACK
6295 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6296 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6297 && ! flag_inline_functions)
6298 pending_stack_adjust = 0;
6299 #endif
6302 /* Pop any previously-pushed arguments that have not been popped yet. */
6304 void
6305 do_pending_stack_adjust ()
6307 if (inhibit_defer_pop == 0)
6309 if (pending_stack_adjust != 0)
6310 adjust_stack (GEN_INT (pending_stack_adjust));
6311 pending_stack_adjust = 0;
6315 /* Expand all cleanups up to OLD_CLEANUPS.
6316 Needed here, and also for language-dependent calls. */
6318 void
6319 expand_cleanups_to (old_cleanups)
6320 tree old_cleanups;
6322 while (cleanups_this_call != old_cleanups)
6324 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6325 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6329 /* Expand conditional expressions. */
6331 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6332 LABEL is an rtx of code CODE_LABEL, in this function and all the
6333 functions here. */
6335 void
6336 jumpifnot (exp, label)
6337 tree exp;
6338 rtx label;
6340 do_jump (exp, label, NULL_RTX);
6343 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6345 void
6346 jumpif (exp, label)
6347 tree exp;
6348 rtx label;
6350 do_jump (exp, NULL_RTX, label);
6353 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6354 the result is zero, or IF_TRUE_LABEL if the result is one.
6355 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6356 meaning fall through in that case.
6358 do_jump always does any pending stack adjust except when it does not
6359 actually perform a jump. An example where there is no jump
6360 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6362 This function is responsible for optimizing cases such as
6363 &&, || and comparison operators in EXP. */
6365 void
6366 do_jump (exp, if_false_label, if_true_label)
6367 tree exp;
6368 rtx if_false_label, if_true_label;
6370 register enum tree_code code = TREE_CODE (exp);
6371 /* Some cases need to create a label to jump to
6372 in order to properly fall through.
6373 These cases set DROP_THROUGH_LABEL nonzero. */
6374 rtx drop_through_label = 0;
6375 rtx temp;
6376 rtx comparison = 0;
6377 int i;
6378 tree type;
6380 emit_queue ();
6382 switch (code)
6384 case ERROR_MARK:
6385 break;
6387 case INTEGER_CST:
6388 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6389 if (temp)
6390 emit_jump (temp);
6391 break;
6393 #if 0
6394 /* This is not true with #pragma weak */
6395 case ADDR_EXPR:
6396 /* The address of something can never be zero. */
6397 if (if_true_label)
6398 emit_jump (if_true_label);
6399 break;
6400 #endif
6402 case NOP_EXPR:
6403 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6404 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6405 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6406 goto normal;
6407 case CONVERT_EXPR:
6408 /* If we are narrowing the operand, we have to do the compare in the
6409 narrower mode. */
6410 if ((TYPE_PRECISION (TREE_TYPE (exp))
6411 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6412 goto normal;
6413 case NON_LVALUE_EXPR:
6414 case REFERENCE_EXPR:
6415 case ABS_EXPR:
6416 case NEGATE_EXPR:
6417 case LROTATE_EXPR:
6418 case RROTATE_EXPR:
6419 /* These cannot change zero->non-zero or vice versa. */
6420 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6421 break;
6423 #if 0
6424 /* This is never less insns than evaluating the PLUS_EXPR followed by
6425 a test and can be longer if the test is eliminated. */
6426 case PLUS_EXPR:
6427 /* Reduce to minus. */
6428 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6429 TREE_OPERAND (exp, 0),
6430 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6431 TREE_OPERAND (exp, 1))));
6432 /* Process as MINUS. */
6433 #endif
6435 case MINUS_EXPR:
6436 /* Non-zero iff operands of minus differ. */
6437 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6438 TREE_OPERAND (exp, 0),
6439 TREE_OPERAND (exp, 1)),
6440 NE, NE);
6441 break;
6443 case BIT_AND_EXPR:
6444 /* If we are AND'ing with a small constant, do this comparison in the
6445 smallest type that fits. If the machine doesn't have comparisons
6446 that small, it will be converted back to the wider comparison.
6447 This helps if we are testing the sign bit of a narrower object.
6448 combine can't do this for us because it can't know whether a
6449 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
6451 if (! SLOW_BYTE_ACCESS
6452 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6453 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
6454 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6455 && (type = type_for_size (i + 1, 1)) != 0
6456 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6457 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6458 != CODE_FOR_nothing))
6460 do_jump (convert (type, exp), if_false_label, if_true_label);
6461 break;
6463 goto normal;
6465 case TRUTH_NOT_EXPR:
6466 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6467 break;
6469 case TRUTH_ANDIF_EXPR:
6470 if (if_false_label == 0)
6471 if_false_label = drop_through_label = gen_label_rtx ();
6472 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
6473 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6474 break;
6476 case TRUTH_ORIF_EXPR:
6477 if (if_true_label == 0)
6478 if_true_label = drop_through_label = gen_label_rtx ();
6479 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
6480 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6481 break;
6483 case COMPOUND_EXPR:
6484 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6485 free_temp_slots ();
6486 emit_queue ();
6487 do_pending_stack_adjust ();
6488 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
6489 break;
6491 case COMPONENT_REF:
6492 case BIT_FIELD_REF:
6493 case ARRAY_REF:
6495 int bitsize, bitpos, unsignedp;
6496 enum machine_mode mode;
6497 tree type;
6498 tree offset;
6499 int volatilep = 0;
6501 /* Get description of this reference. We don't actually care
6502 about the underlying object here. */
6503 get_inner_reference (exp, &bitsize, &bitpos, &offset,
6504 &mode, &unsignedp, &volatilep);
6506 type = type_for_size (bitsize, unsignedp);
6507 if (! SLOW_BYTE_ACCESS
6508 && type != 0 && bitsize >= 0
6509 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6510 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6511 != CODE_FOR_nothing))
6513 do_jump (convert (type, exp), if_false_label, if_true_label);
6514 break;
6516 goto normal;
6519 case COND_EXPR:
6520 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
6521 if (integer_onep (TREE_OPERAND (exp, 1))
6522 && integer_zerop (TREE_OPERAND (exp, 2)))
6523 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6525 else if (integer_zerop (TREE_OPERAND (exp, 1))
6526 && integer_onep (TREE_OPERAND (exp, 2)))
6527 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6529 else
6531 register rtx label1 = gen_label_rtx ();
6532 drop_through_label = gen_label_rtx ();
6533 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
6534 /* Now the THEN-expression. */
6535 do_jump (TREE_OPERAND (exp, 1),
6536 if_false_label ? if_false_label : drop_through_label,
6537 if_true_label ? if_true_label : drop_through_label);
6538 /* In case the do_jump just above never jumps. */
6539 do_pending_stack_adjust ();
6540 emit_label (label1);
6541 /* Now the ELSE-expression. */
6542 do_jump (TREE_OPERAND (exp, 2),
6543 if_false_label ? if_false_label : drop_through_label,
6544 if_true_label ? if_true_label : drop_through_label);
6546 break;
6548 case EQ_EXPR:
6549 if (integer_zerop (TREE_OPERAND (exp, 1)))
6550 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
6551 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6552 == MODE_INT)
5554 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6555 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
6556 else
6557 comparison = compare (exp, EQ, EQ);
6558 break;
6560 case NE_EXPR:
6561 if (integer_zerop (TREE_OPERAND (exp, 1)))
6562 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6563 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6564 == MODE_INT)
5566 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6567 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
6568 else
6569 comparison = compare (exp, NE, NE);
6570 break;
6572 case LT_EXPR:
6573 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6574 == MODE_INT)
6575 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6576 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
6577 else
6578 comparison = compare (exp, LT, LTU);
6579 break;
6581 case LE_EXPR:
6582 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6583 == MODE_INT)
6584 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6585 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
6586 else
6587 comparison = compare (exp, LE, LEU);
6588 break;
6590 case GT_EXPR:
6591 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6592 == MODE_INT)
6593 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6594 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
6595 else
6596 comparison = compare (exp, GT, GTU);
6597 break;
6599 case GE_EXPR:
6600 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6601 == MODE_INT)
6602 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6603 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
6604 else
6605 comparison = compare (exp, GE, GEU);
6606 break;
6608 default:
6609 normal:
6610 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
6611 #if 0
6612 /* This is not needed any more and causes poor code since it causes
6613 comparisons and tests from non-SI objects to have different code
6614 sequences. */
6615 /* Copy to register to avoid generating bad insns by cse
6616 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
6617 if (!cse_not_expected && GET_CODE (temp) == MEM)
6618 temp = copy_to_reg (temp);
6619 #endif
6620 do_pending_stack_adjust ();
6621 if (GET_CODE (temp) == CONST_INT)
6622 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
6623 else if (GET_CODE (temp) == LABEL_REF)
6624 comparison = const_true_rtx;
6625 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6626 && !can_compare_p (GET_MODE (temp)))
6627 /* Note swapping the labels gives us not-equal. */
6628 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
6629 else if (GET_MODE (temp) != VOIDmode)
6630 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
6631 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
6632 GET_MODE (temp), NULL_RTX, 0);
6633 else
6634 abort ();
6637 /* Do any postincrements in the expression that was tested. */
6638 emit_queue ();
6640 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
6641 straight into a conditional jump instruction as the jump condition.
6642 Otherwise, all the work has been done already. */
6644 if (comparison == const_true_rtx)
6646 if (if_true_label)
6647 emit_jump (if_true_label);
6649 else if (comparison == const0_rtx)
6651 if (if_false_label)
6652 emit_jump (if_false_label);
6654 else if (comparison)
6655 do_jump_for_compare (comparison, if_false_label, if_true_label);
6657 free_temp_slots ();
6659 if (drop_through_label)
6661 /* If do_jump produces code that might be jumped around,
6662 do any stack adjusts from that code, before the place
6663 where control merges in. */
6664 do_pending_stack_adjust ();
6665 emit_label (drop_through_label);
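/* For example, do_jump lowers `a && b' into two conditional jumps
   sharing the false label; written with gotos, the emitted control
   flow looks like this sketch (illustrative only):  */
#if 0
static int
andif_example (int a, int b)
{
  if (a == 0)
    goto if_false;            /* TRUTH_ANDIF: test first operand */
  if (b == 0)
    goto if_false;            /* then the second */
  return 1;                   /* true case falls through */
 if_false:
  return 0;
}
#endif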
6669 /* Given a comparison expression EXP for values too wide to be compared
6670 with one insn, test the comparison and jump to the appropriate label.
6671 The code of EXP is ignored; we always test GT if SWAP is 0,
6672 and LT if SWAP is 1. */
6674 static void
6675 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
6676 tree exp;
6677 int swap;
6678 rtx if_false_label, if_true_label;
6680 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
6681 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
6682 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6683 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6684 rtx drop_through_label = 0;
6685 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
6686 int i;
6688 if (! if_true_label || ! if_false_label)
6689 drop_through_label = gen_label_rtx ();
6690 if (! if_true_label)
6691 if_true_label = drop_through_label;
6692 if (! if_false_label)
6693 if_false_label = drop_through_label;
6695 /* Compare a word at a time, high order first. */
6696 for (i = 0; i < nwords; i++)
6698 rtx comp;
6699 rtx op0_word, op1_word;
6701 if (WORDS_BIG_ENDIAN)
6703 op0_word = operand_subword_force (op0, i, mode);
6704 op1_word = operand_subword_force (op1, i, mode);
6706 else
6708 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
6709 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
6712 /* All but high-order word must be compared as unsigned. */
6713 comp = compare_from_rtx (op0_word, op1_word,
6714 (unsignedp || i > 0) ? GTU : GT,
6715 unsignedp, word_mode, NULL_RTX, 0);
6716 if (comp == const_true_rtx)
6717 emit_jump (if_true_label);
6718 else if (comp != const0_rtx)
6719 do_jump_for_compare (comp, NULL_RTX, if_true_label);
6721 /* Consider lower words only if these are equal. */
6722 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
6723 NULL_RTX, 0);
6724 if (comp == const_true_rtx)
6725 emit_jump (if_false_label);
6726 else if (comp != const0_rtx)
6727 do_jump_for_compare (comp, NULL_RTX, if_false_label);
6730 if (if_false_label)
6731 emit_jump (if_false_label);
6732 if (drop_through_label)
6733 emit_label (drop_through_label);
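/* A reference sketch of the word-at-a-time comparison for a two-word
   value (illustrative only; for signed operands only the high-order
   word would use a signed compare):  */
#if 0
static int
wide_gtu (unsigned long a_hi, unsigned long a_lo,
          unsigned long b_hi, unsigned long b_lo)
{
  if (a_hi > b_hi)
    return 1;                 /* high words decide when unequal */
  if (a_hi != b_hi)
    return 0;
  return a_lo > b_lo;         /* low words matter only on equality */
}
#endif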
6736 /* Given an EQ_EXPR expression EXP for values too wide to be compared
6737 with one insn, test the comparison and jump to the appropriate label. */
6739 static void
6740 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
6741 tree exp;
6742 rtx if_false_label, if_true_label;
6744 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6745 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6746 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6747 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
6748 int i;
6749 rtx drop_through_label = 0;
6751 if (! if_false_label)
6752 drop_through_label = if_false_label = gen_label_rtx ();
6754 for (i = 0; i < nwords; i++)
6756 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
6757 operand_subword_force (op1, i, mode),
6758 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
6759 word_mode, NULL_RTX, 0);
6760 if (comp == const_true_rtx)
6761 emit_jump (if_false_label);
6762 else if (comp != const0_rtx)
6763 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6766 if (if_true_label)
6767 emit_jump (if_true_label);
6768 if (drop_through_label)
6769 emit_label (drop_through_label);
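/* Likewise for multiword equality: the first differing pair of words
   sends control to the false label at once (illustrative sketch):  */
#if 0
static int
wide_eq (unsigned long a_hi, unsigned long a_lo,
         unsigned long b_hi, unsigned long b_lo)
{
  if (a_hi != b_hi)
    return 0;                 /* any differing word decides */
  if (a_lo != b_lo)
    return 0;
  return 1;                   /* all words compared equal */
}
#endif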
6772 /* Jump according to whether OP0 is 0.
6773 We assume that OP0 has an integer mode that is too wide
6774 for the available compare insns. */
6776 static void
6777 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
6778 rtx op0;
6779 rtx if_false_label, if_true_label;
6781 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
6782 int i;
6783 rtx drop_through_label = 0;
6785 if (! if_false_label)
6786 drop_through_label = if_false_label = gen_label_rtx ();
6788 for (i = 0; i < nwords; i++)
6790 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
6791 GET_MODE (op0)),
6792 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
6793 if (comp == const_true_rtx)
6794 emit_jump (if_false_label);
6795 else if (comp != const0_rtx)
6796 do_jump_for_compare (comp, if_false_label, NULL_RTX);
6799 if (if_true_label)
6800 emit_jump (if_true_label);
6801 if (drop_through_label)
6802 emit_label (drop_through_label);
6805 /* Given a comparison expression in rtl form, output conditional branches to
6806 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
6808 static void
6809 do_jump_for_compare (comparison, if_false_label, if_true_label)
6810 rtx comparison, if_false_label, if_true_label;
6812 if (if_true_label)
6814 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6815 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
6816 else
6817 abort ();
6819 if (if_false_label)
6820 emit_jump (if_false_label);
6822 else if (if_false_label)
6824 rtx insn;
6825 rtx prev = PREV_INSN (get_last_insn ());
6826 rtx branch = 0;
6828 /* Output the branch with the opposite condition. Then try to invert
6829 what is generated. If more than one insn is a branch, or if the
6830 branch is not the last insn written, abort. If we can't invert
6831 the branch, make a true label, redirect this jump to that,
6832 emit a jump to the false label and define the true label. */
6834 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
6835 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
6836 else
6837 abort ();
6839 /* Here we get the insn before what was just emitted.
6840 On some machines, emitting the branch can discard
6841 the previous compare insn and emit a replacement. */
6842 if (prev == 0)
6843 /* If there's only one preceding insn... */
6844 insn = get_insns ();
6845 else
6846 insn = NEXT_INSN (prev);
6848 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
6849 if (GET_CODE (insn) == JUMP_INSN)
6851 if (branch)
6852 abort ();
6853 branch = insn;
6856 if (branch != get_last_insn ())
6857 abort ();
6859 if (! invert_jump (branch, if_false_label))
6861 if_true_label = gen_label_rtx ();
6862 redirect_jump (branch, if_true_label);
6863 emit_jump (if_false_label);
6864 emit_label (if_true_label);
6869 /* Generate code for a comparison expression EXP
6870 (including code to compute the values to be compared)
6871 and set (CC0) according to the result.
6872 SIGNED_CODE should be the rtx operation for this comparison for
6873 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
6875 We force a stack adjustment unless there are currently
6876 things pushed on the stack that aren't yet used. */
6878 static rtx
6879 compare (exp, signed_code, unsigned_code)
6880 register tree exp;
6881 enum rtx_code signed_code, unsigned_code;
6883 register rtx op0
6884 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6885 register rtx op1
6886 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6887 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
6888 register enum machine_mode mode = TYPE_MODE (type);
6889 int unsignedp = TREE_UNSIGNED (type);
6890 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
6892 return compare_from_rtx (op0, op1, code, unsignedp, mode,
6893 ((mode == BLKmode)
6894 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
6895 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
6898 /* Like compare but expects the values to compare as two rtx's.
6899 The decision as to signed or unsigned comparison must be made by the caller.
6901 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
6902 compared.
6904 If ALIGN is non-zero, it is the alignment of this type; if zero, the
6905 size of MODE should be used. */
6908 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
6909 register rtx op0, op1;
6910 enum rtx_code code;
6911 int unsignedp;
6912 enum machine_mode mode;
6913 rtx size;
6914 int align;
6916 /* If one operand is constant, make it the second one. */
6918 if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
6920 rtx tem = op0;
6921 op0 = op1;
6922 op1 = tem;
6923 code = swap_condition (code);
6926 if (flag_force_mem)
6928 op0 = force_not_mem (op0);
6929 op1 = force_not_mem (op1);
6932 do_pending_stack_adjust ();
6934 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
6935 return simplify_relational_operation (code, mode, op0, op1);
6937 #if 0
6938 /* There's no need to do this now that combine.c can eliminate lots of
6939 sign extensions. This can be less efficient in certain cases on other
6940 machines.
6942 /* If this is a signed equality comparison, we can do it as an
6943 unsigned comparison since zero-extension is cheaper than sign
6944 extension and comparisons with zero are done as unsigned. This is
6945 the case even on machines that can do fast sign extension, since
6946 zero-extension is easier to combine with other operations than
6947 sign-extension is. If we are comparing against a constant, we must
6948 convert it to what it would look like unsigned. */
6949 if ((code == EQ || code == NE) && ! unsignedp
6950 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
6952 if (GET_CODE (op1) == CONST_INT
6953 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
6954 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
6955 unsignedp = 1;
6957 #endif
6959 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
6961 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
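/* Note that moving a constant into the second operand position
   requires swapping the condition as well (illustrative sketch):  */
#if 0
static int
swapped_compare (int x)
{
  /* `5 < x' is emitted as (GT x 5): swap_condition maps LT to GT,
     not to LE.  */
  return 5 < x;
}
#endif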
6964 /* Generate code to calculate EXP using a store-flag instruction
6965 and return an rtx for the result. EXP is either a comparison
6966 or a TRUTH_NOT_EXPR whose operand is a comparison.
6968 If TARGET is nonzero, store the result there if convenient.
6970 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
6971 cheap.
6973 Return zero if there is no suitable set-flag instruction
6974 available on this machine.
6976 Once expand_expr has been called on the arguments of the comparison,
6977 we are committed to doing the store flag, since it is not safe to
6978 re-evaluate the expression. We emit the store-flag insn by calling
6979 emit_store_flag, but only expand the arguments if we have a reason
6980 to believe that emit_store_flag will be successful. If we think that
6981 it will, but it isn't, we have to simulate the store-flag with a
6982 set/jump/set sequence. */
6984 static rtx
6985 do_store_flag (exp, target, mode, only_cheap)
6986 tree exp;
6987 rtx target;
6988 enum machine_mode mode;
6989 int only_cheap;
6991 enum rtx_code code;
6992 tree arg0, arg1, type;
6993 tree tem;
6994 enum machine_mode operand_mode;
6995 int invert = 0;
6996 int unsignedp;
6997 rtx op0, op1;
6998 enum insn_code icode;
6999 rtx subtarget = target;
7000 rtx result, label, pattern, jump_pat;
7002 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7003 result at the end. We can't simply invert the test since it would
7004 have already been inverted if it were valid. This case occurs for
7005 some floating-point comparisons. */
7007 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7008 invert = 1, exp = TREE_OPERAND (exp, 0);
7010 arg0 = TREE_OPERAND (exp, 0);
7011 arg1 = TREE_OPERAND (exp, 1);
7012 type = TREE_TYPE (arg0);
7013 operand_mode = TYPE_MODE (type);
7014 unsignedp = TREE_UNSIGNED (type);
7016 /* We won't bother with BLKmode store-flag operations because it would mean
7017 passing a lot of information to emit_store_flag. */
7018 if (operand_mode == BLKmode)
7019 return 0;
7021 STRIP_NOPS (arg0);
7022 STRIP_NOPS (arg1);
7024 /* Get the rtx comparison code to use. We know that EXP is a comparison
7025 operation of some type. Some comparisons against 1 and -1 can be
7026 converted to comparisons with zero. Do so here so that the tests
7027 below will be aware that we have a comparison with zero. These
7028 tests will not catch constants in the first operand, but constants
7029 are rarely passed as the first operand. */
7031 switch (TREE_CODE (exp))
7033 case EQ_EXPR:
7034 code = EQ;
7035 break;
7036 case NE_EXPR:
7037 code = NE;
7038 break;
7039 case LT_EXPR:
7040 if (integer_onep (arg1))
7041 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7042 else
7043 code = unsignedp ? LTU : LT;
7044 break;
7045 case LE_EXPR:
7046 if (integer_all_onesp (arg1))
7047 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
7048 else
7049 code = unsignedp ? LEU : LE;
7050 break;
7051 case GT_EXPR:
7052 if (integer_all_onesp (arg1))
7053 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
7054 else
7055 code = unsignedp ? GTU : GT;
7056 break;
7057 case GE_EXPR:
7058 if (integer_onep (arg1))
7059 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7060 else
7061 code = unsignedp ? GEU : GE;
7062 break;
7063 default:
7064 abort ();
7067 /* Put a constant second. */
7068 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7070 tem = arg0; arg0 = arg1; arg1 = tem;
7071 code = swap_condition (code);
7074 /* If this is an equality or inequality test of a single bit, we can
7075 do this by shifting the bit being tested to the low-order bit and
7076 masking the result with the constant 1. If the condition was EQ,
7077 we xor it with 1. This does not require an scc insn and is faster
7078 than an scc insn even if we have it. */
7080 if ((code == NE || code == EQ)
7081 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7082 && integer_pow2p (TREE_OPERAND (arg0, 1))
7083 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7085 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7086 NULL_RTX, VOIDmode, 0)));
7088 if (subtarget == 0 || GET_CODE (subtarget) != REG
7089 || GET_MODE (subtarget) != operand_mode
7090 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7091 subtarget = 0;
7093 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7095 if (bitnum != 0)
7096 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7097 size_int (bitnum), target, 1);
7099 if (GET_MODE (op0) != mode)
7100 op0 = convert_to_mode (mode, op0, 1);
7102 if (bitnum != TYPE_PRECISION (type) - 1)
7103 op0 = expand_and (op0, const1_rtx, target);
7105 if ((code == EQ && ! invert) || (code == NE && invert))
7106 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7107 OPTAB_LIB_WIDEN);
7109 return op0;
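/* At source level the single-bit rewrite above looks like this
   sketch (illustrative only):  */
#if 0
static int
bit3_set (unsigned x)
{
  /* (x & 8) != 0 becomes (x >> 3) & 1; for the EQ sense the result
     is then XORed with 1.  No scc insn is needed.  */
  return (x >> 3) & 1;
}
#endif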
7112 /* Now see if we are likely to be able to do this. Return if not. */
7113 if (! can_compare_p (operand_mode))
7114 return 0;
7115 icode = setcc_gen_code[(int) code];
7116 if (icode == CODE_FOR_nothing
7117 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7119 /* We can only do this if it is one of the special cases that
7120 can be handled without an scc insn. */
7121 if ((code == LT && integer_zerop (arg1))
7122 || (! only_cheap && code == GE && integer_zerop (arg1)))
7124 else if (BRANCH_COST >= 0
7125 && ! only_cheap && (code == NE || code == EQ)
7126 && TREE_CODE (type) != REAL_TYPE
7127 && ((abs_optab->handlers[(int) operand_mode].insn_code
7128 != CODE_FOR_nothing)
7129 || (ffs_optab->handlers[(int) operand_mode].insn_code
7130 != CODE_FOR_nothing)))
7132 else
7133 return 0;
7136 preexpand_calls (exp);
7137 if (subtarget == 0 || GET_CODE (subtarget) != REG
7138 || GET_MODE (subtarget) != operand_mode
7139 || ! safe_from_p (subtarget, arg1))
7140 subtarget = 0;
7142 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7143 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7145 if (target == 0)
7146 target = gen_reg_rtx (mode);
7148 result = emit_store_flag (target, code, op0, op1, operand_mode,
7149 unsignedp, 1);
7151 if (result)
7153 if (invert)
7154 result = expand_binop (mode, xor_optab, result, const1_rtx,
7155 result, 0, OPTAB_LIB_WIDEN);
7156 return result;
7159 /* If this failed, we have to do this with set/compare/jump/set code. */
7160 if (target == 0 || GET_CODE (target) != REG
7161 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7162 target = gen_reg_rtx (GET_MODE (target));
7164 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7165 result = compare_from_rtx (op0, op1, code, unsignedp,
7166 operand_mode, NULL_RTX, 0);
7167 if (GET_CODE (result) == CONST_INT)
7168 return (((result == const0_rtx && ! invert)
7169 || (result != const0_rtx && invert))
7170 ? const0_rtx : const1_rtx);
7172 label = gen_label_rtx ();
7173 if (bcc_gen_fctn[(int) code] == 0)
7174 abort ();
7176 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7177 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7178 emit_label (label);
7180 return target;
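/* The set/compare/jump/set fallback behaves like this sketch
   (illustrative only, shown for LT):  */
#if 0
static int
store_flag_fallback (int a, int b)
{
  int flag = 1;               /* set: assume the condition holds */
  if (!(a < b))
    flag = 0;                 /* the emitted jump was not taken */
  return flag;
}
#endif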
7183 /* Generate a tablejump instruction (used for switch statements). */
7185 #ifdef HAVE_tablejump
7187 /* INDEX is the value being switched on, with the lowest value
7188 in the table already subtracted.
7189 MODE is its expected mode (needed if INDEX is constant).
7190 RANGE is the length of the jump table.
7191 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7193 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7194 index value is out of range. */
7196 void
7197 do_tablejump (index, mode, range, table_label, default_label)
7198 rtx index, range, table_label, default_label;
7199 enum machine_mode mode;
7201 register rtx temp, vector;
7203 /* Do an unsigned comparison (in the proper mode) between the index
7204 expression and the value which represents the length of the range.
7205 Since we just finished subtracting the lower bound of the range
7206 from the index expression, this comparison allows us to simultaneously
7207 check that the original index expression value is both greater than
7208 or equal to the minimum value of the range and less than or equal to
7209 the maximum value of the range. */
7211 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
7212 emit_jump_insn (gen_bltu (default_label));
7214 /* If index is in range, it must fit in Pmode.
7215 Convert to Pmode so we can index with it. */
7216 if (mode != Pmode)
7217 index = convert_to_mode (Pmode, index, 1);
7219 /* If flag_force_addr were to affect this address
7220 it could interfere with the tricky assumptions made
7221 about addresses that contain label-refs,
7222 which may be valid only very near the tablejump itself. */
7223 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7224 GET_MODE_SIZE, because this indicates how large insns are. The other
7225 uses should all be Pmode, because they are addresses. This code
7226 could fail if addresses and insns are not the same size. */
7227 index = memory_address_noforce
7228 (CASE_VECTOR_MODE,
7229 gen_rtx (PLUS, Pmode,
7230 gen_rtx (MULT, Pmode, index,
7231 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7232 gen_rtx (LABEL_REF, Pmode, table_label)));
7233 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7234 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7235 RTX_UNCHANGING_P (vector) = 1;
7236 convert_move (temp, vector, 0);
7238 emit_jump_insn (gen_tablejump (temp, table_label));
7240 #ifndef CASE_VECTOR_PC_RELATIVE
7241 /* If we are generating PIC code or if the table is PC-relative, the
7242 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7243 if (! flag_pic)
7244 emit_barrier ();
7245 #endif
7248 #endif /* HAVE_tablejump */
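/* The single unsigned comparison above is the classic switch range
   check; at source level it is this sketch (illustrative only,
   assuming lo <= hi):  */
#if 0
static int
in_switch_range (int i, int lo, int hi)
{
  /* With the lower bound already subtracted, one unsigned compare
     tests `i >= lo' and `i <= hi' simultaneously.  */
  return (unsigned) (i - lo) <= (unsigned) (hi - lo);
}
#endif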