1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
55 #ifdef PUSH_ROUNDING
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first. */
59 #endif
61 #endif
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
74 #endif
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
82 int cse_not_expected;
84 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
85 static tree placeholder_list = 0;
87 /* This structure is used by move_by_pieces to describe the move to
88 be performed. */
89 struct move_by_pieces
91 rtx to;
92 rtx to_addr;
93 int autinc_to;
94 int explicit_inc_to;
95 rtx from;
96 rtx from_addr;
97 int autinc_from;
98 int explicit_inc_from;
99 unsigned HOST_WIDE_INT len;
100 HOST_WIDE_INT offset;
101 int reverse;
104 /* This structure is used by store_by_pieces to describe the clear to
105 be performed. */
107 struct store_by_pieces
109 rtx to;
110 rtx to_addr;
111 int autinc_to;
112 int explicit_inc_to;
113 unsigned HOST_WIDE_INT len;
114 HOST_WIDE_INT offset;
115 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
116 PTR constfundata;
117 int reverse;
120 extern struct obstack permanent_obstack;
122 static rtx enqueue_insn PARAMS ((rtx, rtx));
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
124 PARAMS ((unsigned HOST_WIDE_INT,
125 unsigned int));
126 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *));
128 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
129 enum machine_mode));
130 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
131 unsigned int));
132 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
133 unsigned int));
134 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
135 enum machine_mode,
136 struct store_by_pieces *));
137 static rtx get_subtarget PARAMS ((rtx));
138 static int is_zeros_p PARAMS ((tree));
139 static int mostly_zeros_p PARAMS ((tree));
140 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int));
143 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
144 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
145 HOST_WIDE_INT, enum machine_mode,
146 tree, enum machine_mode, int, tree,
147 int));
148 static rtx var_rtx PARAMS ((tree));
149 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
150 static rtx expand_increment PARAMS ((tree, int, int));
151 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
152 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
153 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
154 rtx, rtx));
155 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
158 #endif
159 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
168 /* If a memory-to-memory move would take MOVE_RATIO or more simple
169 move-instruction sequences, we will do a movstr or libcall instead. */
171 #ifndef MOVE_RATIO
172 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
173 #define MOVE_RATIO 2
174 #else
175 /* If we are optimizing for space (-Os), cut down the default move ratio. */
176 #define MOVE_RATIO (optimize_size ? 3 : 15)
177 #endif
178 #endif
180 /* This macro is used to determine whether move_by_pieces should be called
181 to perform a structure copy. */
182 #ifndef MOVE_BY_PIECES_P
183 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
185 #endif
187 /* This array records the insn_code of insns to perform block moves. */
188 enum insn_code movstr_optab[NUM_MACHINE_MODES];
190 /* This array records the insn_code of insns to perform block clears. */
191 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
193 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
195 #ifndef SLOW_UNALIGNED_ACCESS
196 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
197 #endif
199 /* This is run once per compilation to set up which modes can be used
200 directly in memory and to initialize the block move optab. */
202 void
203 init_expr_once ()
205 rtx insn, pat;
206 enum machine_mode mode;
207 int num_clobbers;
208 rtx mem, mem1;
210 start_sequence ();
212 /* Try indexing by frame ptr and try by stack ptr.
213 It is known that on the Convex the stack ptr isn't a valid index.
214 With luck, one or the other is valid on any machine. */
215 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
216 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
218 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
219 pat = PATTERN (insn);
221 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
222 mode = (enum machine_mode) ((int) mode + 1))
224 int regno;
225 rtx reg;
227 direct_load[(int) mode] = direct_store[(int) mode] = 0;
228 PUT_MODE (mem, mode);
229 PUT_MODE (mem1, mode);
231 /* See if there is some register that can be used in this mode and
232 directly loaded or stored from memory. */
234 if (mode != VOIDmode && mode != BLKmode)
235 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
236 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
237 regno++)
239 if (! HARD_REGNO_MODE_OK (regno, mode))
240 continue;
242 reg = gen_rtx_REG (mode, regno);
244 SET_SRC (pat) = mem;
245 SET_DEST (pat) = reg;
246 if (recog (pat, insn, &num_clobbers) >= 0)
247 direct_load[(int) mode] = 1;
249 SET_SRC (pat) = mem1;
250 SET_DEST (pat) = reg;
251 if (recog (pat, insn, &num_clobbers) >= 0)
252 direct_load[(int) mode] = 1;
254 SET_SRC (pat) = reg;
255 SET_DEST (pat) = mem;
256 if (recog (pat, insn, &num_clobbers) >= 0)
257 direct_store[(int) mode] = 1;
259 SET_SRC (pat) = reg;
260 SET_DEST (pat) = mem1;
261 if (recog (pat, insn, &num_clobbers) >= 0)
262 direct_store[(int) mode] = 1;
266 end_sequence ();
269 /* This is run at the start of compiling a function. */
271 void
272 init_expr ()
274 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
276 pending_chain = 0;
277 pending_stack_adjust = 0;
278 stack_pointer_delta = 0;
279 inhibit_defer_pop = 0;
280 saveregs_value = 0;
281 apply_args_value = 0;
282 forced_labels = 0;
285 void
286 mark_expr_status (p)
287 struct expr_status *p;
289 if (p == NULL)
290 return;
292 ggc_mark_rtx (p->x_saveregs_value);
293 ggc_mark_rtx (p->x_apply_args_value);
294 ggc_mark_rtx (p->x_forced_labels);
297 void
298 free_expr_status (f)
299 struct function *f;
301 free (f->expr);
302 f->expr = NULL;
305 /* Small sanity check that the queue is empty at the end of a function. */
307 void
308 finish_expr_for_function ()
310 if (pending_chain)
311 abort ();
314 /* Manage the queue of increment instructions to be output
315 for POSTINCREMENT_EXPR expressions, etc. */
317 /* Queue up to increment (or change) VAR later. BODY says how:
318 BODY should be the same thing you would pass to emit_insn
319 to increment right away. It will go to emit_insn later on.
321 The value is a QUEUED expression to be used in place of VAR
322 where you want to guarantee the pre-incrementation value of VAR. */
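/* Illustrative sketch (hypothetical caller, not taken from this file):
   to queue a post-increment of VAR by 4 one might write

     rtx old = enqueue_insn (var, gen_add2_insn (var, GEN_INT (4)));

   and then use OLD in place of VAR wherever the pre-increment value
   is required; the addition itself is emitted later by emit_queue.  */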
324 static rtx
325 enqueue_insn (var, body)
326 rtx var, body;
328 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
329 body, pending_chain);
330 return pending_chain;
333 /* Use protect_from_queue to convert a QUEUED expression
334 into something that you can put immediately into an instruction.
335 If the queued incrementation has not happened yet,
336 protect_from_queue returns the variable itself.
337 If the incrementation has happened, protect_from_queue returns a temp
338 that contains a copy of the old value of the variable.
340 Any time an rtx which might possibly be a QUEUED is to be put
341 into an instruction, it must be passed through protect_from_queue first.
342 QUEUED expressions are not meaningful in instructions.
344 Do not pass a value through protect_from_queue and then hold
345 on to it for a while before putting it in an instruction!
346 If the queue is flushed in between, incorrect code will result. */
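/* A minimal usage sketch (illustrative): before emitting a move whose
   operands may contain QUEUED rtxs, a caller does

     to = protect_from_queue (to, 1);
     from = protect_from_queue (from, 0);
     emit_move_insn (to, from);

   passing MODIFY == 1 only for the operand that will be written, and
   emitting the insn before the queue can be flushed.  */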
 348 rtx
 349 protect_from_queue (x, modify)
350 rtx x;
351 int modify;
353 RTX_CODE code = GET_CODE (x);
355 #if 0 /* A QUEUED can hang around after the queue is forced out. */
356 /* Shortcut for most common case. */
357 if (pending_chain == 0)
358 return x;
359 #endif
361 if (code != QUEUED)
363 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
364 use of autoincrement. Make a copy of the contents of the memory
365 location rather than a copy of the address, but not if the value is
366 of mode BLKmode. Don't modify X in place since it might be
367 shared. */
368 if (code == MEM && GET_MODE (x) != BLKmode
369 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
371 rtx y = XEXP (x, 0);
372 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
374 if (QUEUED_INSN (y))
376 rtx temp = gen_reg_rtx (GET_MODE (x));
378 emit_insn_before (gen_move_insn (temp, new),
379 QUEUED_INSN (y));
380 return temp;
383 /* Copy the address into a pseudo, so that the returned value
384 remains correct across calls to emit_queue. */
385 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
388 /* Otherwise, recursively protect the subexpressions of all
389 the kinds of rtx's that can contain a QUEUED. */
390 if (code == MEM)
392 rtx tem = protect_from_queue (XEXP (x, 0), 0);
393 if (tem != XEXP (x, 0))
395 x = copy_rtx (x);
396 XEXP (x, 0) = tem;
399 else if (code == PLUS || code == MULT)
401 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
402 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
403 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
405 x = copy_rtx (x);
406 XEXP (x, 0) = new0;
407 XEXP (x, 1) = new1;
410 return x;
412 /* If the increment has not happened, use the variable itself. Copy it
413 into a new pseudo so that the value remains correct across calls to
414 emit_queue. */
415 if (QUEUED_INSN (x) == 0)
416 return copy_to_reg (QUEUED_VAR (x));
417 /* If the increment has happened and a pre-increment copy exists,
418 use that copy. */
419 if (QUEUED_COPY (x) != 0)
420 return QUEUED_COPY (x);
421 /* The increment has happened but we haven't set up a pre-increment copy.
422 Set one up now, and use it. */
423 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
424 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
425 QUEUED_INSN (x));
426 return QUEUED_COPY (x);
429 /* Return nonzero if X contains a QUEUED expression:
430 if it contains anything that will be altered by a queued increment.
431 We handle only combinations of MEM, PLUS, MINUS and MULT operators
432 since memory addresses generally contain only those. */
 434 int
 435 queued_subexp_p (x)
436 rtx x;
438 enum rtx_code code = GET_CODE (x);
439 switch (code)
441 case QUEUED:
442 return 1;
443 case MEM:
444 return queued_subexp_p (XEXP (x, 0));
445 case MULT:
446 case PLUS:
447 case MINUS:
448 return (queued_subexp_p (XEXP (x, 0))
449 || queued_subexp_p (XEXP (x, 1)));
450 default:
451 return 0;
455 /* Perform all the pending incrementations. */
457 void
458 emit_queue ()
460 rtx p;
461 while ((p = pending_chain))
463 rtx body = QUEUED_BODY (p);
465 if (GET_CODE (body) == SEQUENCE)
467 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
468 emit_insn (QUEUED_BODY (p));
470 else
471 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
472 pending_chain = QUEUED_NEXT (p);
476 /* Copy data from FROM to TO, where the machine modes are not the same.
477 Both modes may be integer, or both may be floating.
478 UNSIGNEDP should be nonzero if FROM is an unsigned type.
479 This causes zero-extension instead of sign-extension. */
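/* For example (illustrative): to zero-extend a SImode value FROM into
   a fresh DImode register, a caller might write

     rtx to = gen_reg_rtx (DImode);
     convert_move (to, from, 1);

   while passing UNSIGNEDP == 0 would request sign-extension instead.  */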
481 void
482 convert_move (to, from, unsignedp)
483 rtx to, from;
484 int unsignedp;
486 enum machine_mode to_mode = GET_MODE (to);
487 enum machine_mode from_mode = GET_MODE (from);
488 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
489 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
490 enum insn_code code;
491 rtx libcall;
493 /* rtx code for making an equivalent value. */
494 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
496 to = protect_from_queue (to, 1);
497 from = protect_from_queue (from, 0);
499 if (to_real != from_real)
500 abort ();
502 /* If FROM is a SUBREG that indicates that we have already done at least
503 the required extension, strip it. We don't handle such SUBREGs as
504 TO here. */
506 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
507 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
508 >= GET_MODE_SIZE (to_mode))
509 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
510 from = gen_lowpart (to_mode, from), from_mode = to_mode;
512 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
513 abort ();
515 if (to_mode == from_mode
516 || (from_mode == VOIDmode && CONSTANT_P (from)))
518 emit_move_insn (to, from);
519 return;
522 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
524 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
525 abort ();
527 if (VECTOR_MODE_P (to_mode))
528 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
529 else
530 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
532 emit_move_insn (to, from);
533 return;
536 if (to_real != from_real)
537 abort ();
539 if (to_real)
541 rtx value, insns;
543 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
545 /* Try converting directly if the insn is supported. */
546 if ((code = can_extend_p (to_mode, from_mode, 0))
547 != CODE_FOR_nothing)
549 emit_unop_insn (code, to, from, UNKNOWN);
550 return;
554 #ifdef HAVE_trunchfqf2
555 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
557 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
558 return;
560 #endif
561 #ifdef HAVE_trunctqfqf2
562 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
564 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
565 return;
567 #endif
568 #ifdef HAVE_truncsfqf2
569 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
571 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
572 return;
574 #endif
575 #ifdef HAVE_truncdfqf2
576 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
578 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
579 return;
581 #endif
582 #ifdef HAVE_truncxfqf2
583 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
585 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
586 return;
588 #endif
589 #ifdef HAVE_trunctfqf2
590 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
592 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
593 return;
595 #endif
597 #ifdef HAVE_trunctqfhf2
598 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
600 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
601 return;
603 #endif
604 #ifdef HAVE_truncsfhf2
605 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
607 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
608 return;
610 #endif
611 #ifdef HAVE_truncdfhf2
612 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
614 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
615 return;
617 #endif
618 #ifdef HAVE_truncxfhf2
619 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
621 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
622 return;
624 #endif
625 #ifdef HAVE_trunctfhf2
626 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
628 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
629 return;
631 #endif
633 #ifdef HAVE_truncsftqf2
634 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
636 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
637 return;
639 #endif
640 #ifdef HAVE_truncdftqf2
641 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
643 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
644 return;
646 #endif
647 #ifdef HAVE_truncxftqf2
648 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
650 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
651 return;
653 #endif
654 #ifdef HAVE_trunctftqf2
655 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
657 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
658 return;
660 #endif
662 #ifdef HAVE_truncdfsf2
663 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
665 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
666 return;
668 #endif
669 #ifdef HAVE_truncxfsf2
670 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
672 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
673 return;
675 #endif
676 #ifdef HAVE_trunctfsf2
677 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
679 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
680 return;
682 #endif
683 #ifdef HAVE_truncxfdf2
684 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
686 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
687 return;
689 #endif
690 #ifdef HAVE_trunctfdf2
691 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
693 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
694 return;
696 #endif
698 libcall = (rtx) 0;
699 switch (from_mode)
701 case SFmode:
702 switch (to_mode)
704 case DFmode:
705 libcall = extendsfdf2_libfunc;
706 break;
708 case XFmode:
709 libcall = extendsfxf2_libfunc;
710 break;
712 case TFmode:
713 libcall = extendsftf2_libfunc;
714 break;
716 default:
717 break;
719 break;
721 case DFmode:
722 switch (to_mode)
724 case SFmode:
725 libcall = truncdfsf2_libfunc;
726 break;
728 case XFmode:
729 libcall = extenddfxf2_libfunc;
730 break;
732 case TFmode:
733 libcall = extenddftf2_libfunc;
734 break;
736 default:
737 break;
739 break;
741 case XFmode:
742 switch (to_mode)
744 case SFmode:
745 libcall = truncxfsf2_libfunc;
746 break;
748 case DFmode:
749 libcall = truncxfdf2_libfunc;
750 break;
752 default:
753 break;
755 break;
757 case TFmode:
758 switch (to_mode)
760 case SFmode:
761 libcall = trunctfsf2_libfunc;
762 break;
764 case DFmode:
765 libcall = trunctfdf2_libfunc;
766 break;
768 default:
769 break;
771 break;
773 default:
774 break;
777 if (libcall == (rtx) 0)
778 /* This conversion is not implemented yet. */
779 abort ();
781 start_sequence ();
782 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
783 1, from, from_mode);
784 insns = get_insns ();
785 end_sequence ();
786 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
787 from));
788 return;
791 /* Now both modes are integers. */
793 /* Handle expanding beyond a word. */
794 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
795 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
797 rtx insns;
798 rtx lowpart;
799 rtx fill_value;
800 rtx lowfrom;
801 int i;
802 enum machine_mode lowpart_mode;
803 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
805 /* Try converting directly if the insn is supported. */
806 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
807 != CODE_FOR_nothing)
809 /* If FROM is a SUBREG, put it into a register. Do this
810 so that we always generate the same set of insns for
811 better cse'ing; if an intermediate assignment occurred,
812 we won't be doing the operation directly on the SUBREG. */
813 if (optimize > 0 && GET_CODE (from) == SUBREG)
814 from = force_reg (from_mode, from);
815 emit_unop_insn (code, to, from, equiv_code);
816 return;
818 /* Next, try converting via full word. */
819 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
820 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
821 != CODE_FOR_nothing))
823 if (GET_CODE (to) == REG)
824 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
825 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
826 emit_unop_insn (code, to,
827 gen_lowpart (word_mode, to), equiv_code);
828 return;
831 /* No special multiword conversion insn; do it by hand. */
832 start_sequence ();
834 /* Since we will turn this into a no conflict block, we must ensure
835 that the source does not overlap the target. */
837 if (reg_overlap_mentioned_p (to, from))
838 from = force_reg (from_mode, from);
840 /* Get a copy of FROM widened to a word, if necessary. */
841 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
842 lowpart_mode = word_mode;
843 else
844 lowpart_mode = from_mode;
846 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
848 lowpart = gen_lowpart (lowpart_mode, to);
849 emit_move_insn (lowpart, lowfrom);
851 /* Compute the value to put in each remaining word. */
852 if (unsignedp)
853 fill_value = const0_rtx;
854 else
856 #ifdef HAVE_slt
857 if (HAVE_slt
858 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
859 && STORE_FLAG_VALUE == -1)
861 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
862 lowpart_mode, 0);
863 fill_value = gen_reg_rtx (word_mode);
864 emit_insn (gen_slt (fill_value));
866 else
867 #endif
869 fill_value
870 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
871 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
872 NULL_RTX, 0);
873 fill_value = convert_to_mode (word_mode, fill_value, 1);
877 /* Fill the remaining words. */
878 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
880 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
881 rtx subword = operand_subword (to, index, 1, to_mode);
883 if (subword == 0)
884 abort ();
886 if (fill_value != subword)
887 emit_move_insn (subword, fill_value);
890 insns = get_insns ();
891 end_sequence ();
893 emit_no_conflict_block (insns, to, from, NULL_RTX,
894 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
895 return;
898 /* Truncating multi-word to a word or less. */
899 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
900 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
902 if (!((GET_CODE (from) == MEM
903 && ! MEM_VOLATILE_P (from)
904 && direct_load[(int) to_mode]
905 && ! mode_dependent_address_p (XEXP (from, 0)))
906 || GET_CODE (from) == REG
907 || GET_CODE (from) == SUBREG))
908 from = force_reg (from_mode, from);
909 convert_move (to, gen_lowpart (word_mode, from), 0);
910 return;
 913 /* Handle pointer conversion. */ /* SPEE 900220. */
914 if (to_mode == PQImode)
916 if (from_mode != QImode)
917 from = convert_to_mode (QImode, from, unsignedp);
919 #ifdef HAVE_truncqipqi2
920 if (HAVE_truncqipqi2)
922 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
923 return;
925 #endif /* HAVE_truncqipqi2 */
926 abort ();
929 if (from_mode == PQImode)
931 if (to_mode != QImode)
933 from = convert_to_mode (QImode, from, unsignedp);
934 from_mode = QImode;
936 else
938 #ifdef HAVE_extendpqiqi2
939 if (HAVE_extendpqiqi2)
941 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
942 return;
944 #endif /* HAVE_extendpqiqi2 */
945 abort ();
949 if (to_mode == PSImode)
951 if (from_mode != SImode)
952 from = convert_to_mode (SImode, from, unsignedp);
954 #ifdef HAVE_truncsipsi2
955 if (HAVE_truncsipsi2)
957 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
958 return;
960 #endif /* HAVE_truncsipsi2 */
961 abort ();
964 if (from_mode == PSImode)
966 if (to_mode != SImode)
968 from = convert_to_mode (SImode, from, unsignedp);
969 from_mode = SImode;
971 else
973 #ifdef HAVE_extendpsisi2
974 if (! unsignedp && HAVE_extendpsisi2)
976 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
977 return;
979 #endif /* HAVE_extendpsisi2 */
980 #ifdef HAVE_zero_extendpsisi2
981 if (unsignedp && HAVE_zero_extendpsisi2)
983 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
984 return;
986 #endif /* HAVE_zero_extendpsisi2 */
987 abort ();
991 if (to_mode == PDImode)
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1000 return;
1002 #endif /* HAVE_truncdipdi2 */
1003 abort ();
1006 if (from_mode == PDImode)
1008 if (to_mode != DImode)
1010 from = convert_to_mode (DImode, from, unsignedp);
1011 from_mode = DImode;
1013 else
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1019 return;
1021 #endif /* HAVE_extendpdidi2 */
1022 abort ();
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1045 return;
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1055 if (flag_force_mem)
1056 from = force_not_mem (from);
1058 emit_unop_insn (code, to, from, equiv_code);
1059 return;
1061 else
1063 enum machine_mode intermediate;
1064 rtx tmp;
1065 tree shift_amount;
1067 /* Search for a mode to convert via. */
1068 for (intermediate = from_mode; intermediate != VOIDmode;
1069 intermediate = GET_MODE_WIDER_MODE (intermediate))
1070 if (((can_extend_p (to_mode, intermediate, unsignedp)
1071 != CODE_FOR_nothing)
1072 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1073 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1074 GET_MODE_BITSIZE (intermediate))))
1075 && (can_extend_p (intermediate, from_mode, unsignedp)
1076 != CODE_FOR_nothing))
1078 convert_move (to, convert_to_mode (intermediate, from,
1079 unsignedp), unsignedp);
1080 return;
1083 /* No suitable intermediate mode.
1084 Generate what we need with shifts. */
1085 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1086 - GET_MODE_BITSIZE (from_mode), 0);
1087 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1088 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1089 to, unsignedp);
1090 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1091 to, unsignedp);
1092 if (tmp != to)
1093 emit_move_insn (to, tmp);
1094 return;
1098 /* Support special truncate insns for certain modes. */
1100 if (from_mode == DImode && to_mode == SImode)
1102 #ifdef HAVE_truncdisi2
1103 if (HAVE_truncdisi2)
1105 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1106 return;
1108 #endif
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1110 return;
1113 if (from_mode == DImode && to_mode == HImode)
1115 #ifdef HAVE_truncdihi2
1116 if (HAVE_truncdihi2)
1118 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1119 return;
1121 #endif
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 return;
1126 if (from_mode == DImode && to_mode == QImode)
1128 #ifdef HAVE_truncdiqi2
1129 if (HAVE_truncdiqi2)
1131 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1132 return;
1134 #endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1139 if (from_mode == SImode && to_mode == HImode)
1141 #ifdef HAVE_truncsihi2
1142 if (HAVE_truncsihi2)
1144 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1145 return;
1147 #endif
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 return;
1152 if (from_mode == SImode && to_mode == QImode)
1154 #ifdef HAVE_truncsiqi2
1155 if (HAVE_truncsiqi2)
1157 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1158 return;
1160 #endif
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 return;
1165 if (from_mode == HImode && to_mode == QImode)
1167 #ifdef HAVE_trunchiqi2
1168 if (HAVE_trunchiqi2)
1170 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1171 return;
1173 #endif
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 return;
1178 if (from_mode == TImode && to_mode == DImode)
1180 #ifdef HAVE_trunctidi2
1181 if (HAVE_trunctidi2)
1183 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1184 return;
1186 #endif
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 return;
1191 if (from_mode == TImode && to_mode == SImode)
1193 #ifdef HAVE_trunctisi2
1194 if (HAVE_trunctisi2)
1196 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1197 return;
1199 #endif
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 return;
1204 if (from_mode == TImode && to_mode == HImode)
1206 #ifdef HAVE_trunctihi2
1207 if (HAVE_trunctihi2)
1209 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1210 return;
1212 #endif
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1214 return;
1217 if (from_mode == TImode && to_mode == QImode)
1219 #ifdef HAVE_trunctiqi2
1220 if (HAVE_trunctiqi2)
1222 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1223 return;
1225 #endif
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1227 return;
1230 /* Handle truncation of volatile memrefs, and so on;
1231 the things that couldn't be truncated directly,
1232 and for which there was no special instruction. */
1233 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1235 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1236 emit_move_insn (to, temp);
1237 return;
1240 /* Mode combination is not recognized. */
1241 abort ();
1244 /* Return an rtx for a value that would result
1245 from converting X to mode MODE.
1246 Both X and MODE may be floating, or both integer.
1247 UNSIGNEDP is nonzero if X is an unsigned value.
1248 This can be done by referring to a part of X in place
1249 or by copying to a new temporary with conversion.
1251 This function *must not* call protect_from_queue
1252 except when putting X into an insn (in which case convert_move does it). */
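/* For example (illustrative):

     rtx wide = convert_to_mode (DImode, x, 0);

   returns an rtx holding X sign-extended to DImode, reusing X when no
   conversion is needed and otherwise converting into a fresh pseudo
   via convert_move.  */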
 1254 rtx
 1255 convert_to_mode (mode, x, unsignedp)
1256 enum machine_mode mode;
1257 rtx x;
1258 int unsignedp;
1260 return convert_modes (mode, VOIDmode, x, unsignedp);
1263 /* Return an rtx for a value that would result
1264 from converting X from mode OLDMODE to mode MODE.
1265 Both modes may be floating, or both integer.
1266 UNSIGNEDP is nonzero if X is an unsigned value.
1268 This can be done by referring to a part of X in place
1269 or by copying to a new temporary with conversion.
1271 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1273 This function *must not* call protect_from_queue
1274 except when putting X into an insn (in which case convert_move does it). */
 1276 rtx
 1277 convert_modes (mode, oldmode, x, unsignedp)
1278 enum machine_mode mode, oldmode;
1279 rtx x;
1280 int unsignedp;
1282 rtx temp;
1284 /* If FROM is a SUBREG that indicates that we have already done at least
1285 the required extension, strip it. */
1287 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1288 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1289 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1290 x = gen_lowpart (mode, x);
1292 if (GET_MODE (x) != VOIDmode)
1293 oldmode = GET_MODE (x);
1295 if (mode == oldmode)
1296 return x;
1298 /* There is one case that we must handle specially: If we are converting
1299 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1300 we are to interpret the constant as unsigned, gen_lowpart will do
 1301 the wrong thing if the constant appears negative. What we want to do is
1302 make the high-order word of the constant zero, not all ones. */
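  /* For instance (illustrative), with 32-bit HOST_WIDE_INT, converting
     (const_int -1) taken as an unsigned SImode value to DImode should
     yield the double-word constant 0x00000000ffffffff, not
     0xffffffffffffffff; hence the explicit zero high word below.  */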
1304 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1305 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1306 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1308 HOST_WIDE_INT val = INTVAL (x);
1310 if (oldmode != VOIDmode
1311 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1313 int width = GET_MODE_BITSIZE (oldmode);
1315 /* We need to zero extend VAL. */
1316 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1319 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1322 /* We can do this with a gen_lowpart if both desired and current modes
1323 are integer, and this is either a constant integer, a register, or a
1324 non-volatile MEM. Except for the constant case where MODE is no
1325 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1327 if ((GET_CODE (x) == CONST_INT
1328 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1329 || (GET_MODE_CLASS (mode) == MODE_INT
1330 && GET_MODE_CLASS (oldmode) == MODE_INT
1331 && (GET_CODE (x) == CONST_DOUBLE
1332 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1333 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1334 && direct_load[(int) mode])
1335 || (GET_CODE (x) == REG
1336 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1337 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1339 /* ?? If we don't know OLDMODE, we have to assume here that
1340 X does not need sign- or zero-extension. This may not be
1341 the case, but it's the best we can do. */
1342 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1343 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1345 HOST_WIDE_INT val = INTVAL (x);
1346 int width = GET_MODE_BITSIZE (oldmode);
1348 /* We must sign or zero-extend in this case. Start by
1349 zero-extending, then sign extend if we need to. */
1350 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1351 if (! unsignedp
1352 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1353 val |= (HOST_WIDE_INT) (-1) << width;
1355 return GEN_INT (trunc_int_for_mode (val, mode));
1358 return gen_lowpart (mode, x);
1361 temp = gen_reg_rtx (mode);
1362 convert_move (temp, x, unsignedp);
1363 return temp;
1366 /* This macro is used to determine what the largest unit size that
1367 move_by_pieces can use is. */
1369 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1370 move efficiently, as opposed to MOVE_MAX which is the maximum
1371 number of bytes we can move with a single instruction. */
1373 #ifndef MOVE_MAX_PIECES
1374 #define MOVE_MAX_PIECES MOVE_MAX
1375 #endif
1377 /* Generate several move instructions to copy LEN bytes from block FROM to
1378 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1379 and TO through protect_from_queue before calling.
1381 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1382 used to push FROM to the stack.
1384 ALIGN is maximum alignment we can assume. */
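/* For example (illustrative), emit_block_move below calls

     move_by_pieces (x, y, INTVAL (size), align);

   when SIZE is a constant small enough for MOVE_BY_PIECES_P to approve
   open-coding the copy.  */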
1386 void
1387 move_by_pieces (to, from, len, align)
1388 rtx to, from;
1389 unsigned HOST_WIDE_INT len;
1390 unsigned int align;
1392 struct move_by_pieces data;
1393 rtx to_addr, from_addr = XEXP (from, 0);
1394 unsigned int max_size = MOVE_MAX_PIECES + 1;
1395 enum machine_mode mode = VOIDmode, tmode;
1396 enum insn_code icode;
1398 data.offset = 0;
1399 data.from_addr = from_addr;
1400 if (to)
1402 to_addr = XEXP (to, 0);
1403 data.to = to;
1404 data.autinc_to
1405 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1406 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1407 data.reverse
1408 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1410 else
1412 to_addr = NULL_RTX;
1413 data.to = NULL_RTX;
1414 data.autinc_to = 1;
1415 #ifdef STACK_GROWS_DOWNWARD
1416 data.reverse = 1;
1417 #else
1418 data.reverse = 0;
1419 #endif
1421 data.to_addr = to_addr;
1422 data.from = from;
1423 data.autinc_from
1424 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1425 || GET_CODE (from_addr) == POST_INC
1426 || GET_CODE (from_addr) == POST_DEC);
1428 data.explicit_inc_from = 0;
1429 data.explicit_inc_to = 0;
1430 if (data.reverse) data.offset = len;
1431 data.len = len;
1433 /* If copying requires more than two move insns,
1434 copy addresses to registers (to make displacements shorter)
1435 and use post-increment if available. */
1436 if (!(data.autinc_from && data.autinc_to)
1437 && move_by_pieces_ninsns (len, align) > 2)
1439 /* Find the mode of the largest move... */
1440 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1441 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1442 if (GET_MODE_SIZE (tmode) < max_size)
1443 mode = tmode;
1445 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1447 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1448 data.autinc_from = 1;
1449 data.explicit_inc_from = -1;
1451 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1453 data.from_addr = copy_addr_to_reg (from_addr);
1454 data.autinc_from = 1;
1455 data.explicit_inc_from = 1;
1457 if (!data.autinc_from && CONSTANT_P (from_addr))
1458 data.from_addr = copy_addr_to_reg (from_addr);
1459 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1461 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1462 data.autinc_to = 1;
1463 data.explicit_inc_to = -1;
1465 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1467 data.to_addr = copy_addr_to_reg (to_addr);
1468 data.autinc_to = 1;
1469 data.explicit_inc_to = 1;
1471 if (!data.autinc_to && CONSTANT_P (to_addr))
1472 data.to_addr = copy_addr_to_reg (to_addr);
1475 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1476 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1477 align = MOVE_MAX * BITS_PER_UNIT;
1479 /* First move what we can in the largest integer mode, then go to
1480 successively smaller modes. */
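  /* For example (illustrative): copying 13 bytes between word-aligned
     operands on a 32-bit target emits three SImode moves followed by
     one QImode move.  */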
1482 while (max_size > 1)
1484 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1485 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1486 if (GET_MODE_SIZE (tmode) < max_size)
1487 mode = tmode;
1489 if (mode == VOIDmode)
1490 break;
1492 icode = mov_optab->handlers[(int) mode].insn_code;
1493 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1494 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1496 max_size = GET_MODE_SIZE (mode);
1499 /* The code above should have handled everything. */
1500 if (data.len > 0)
1501 abort ();
1504 /* Return number of insns required to move L bytes by pieces.
1505 ALIGN (in bits) is maximum alignment we can assume. */
1507 static unsigned HOST_WIDE_INT
1508 move_by_pieces_ninsns (l, align)
1509 unsigned HOST_WIDE_INT l;
1510 unsigned int align;
1512 unsigned HOST_WIDE_INT n_insns = 0;
1513 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1515 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1516 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1517 align = MOVE_MAX * BITS_PER_UNIT;
1519 while (max_size > 1)
1521 enum machine_mode mode = VOIDmode, tmode;
1522 enum insn_code icode;
1524 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1525 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1526 if (GET_MODE_SIZE (tmode) < max_size)
1527 mode = tmode;
1529 if (mode == VOIDmode)
1530 break;
1532 icode = mov_optab->handlers[(int) mode].insn_code;
1533 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1534 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1536 max_size = GET_MODE_SIZE (mode);
1539 if (l)
1540 abort ();
1541 return n_insns;
1544 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1545 with move instructions for mode MODE. GENFUN is the gen_... function
1546 to make a move insn for that mode. DATA has all the other info. */
1548 static void
1549 move_by_pieces_1 (genfun, mode, data)
1550 rtx (*genfun) PARAMS ((rtx, ...));
1551 enum machine_mode mode;
1552 struct move_by_pieces *data;
1554 unsigned int size = GET_MODE_SIZE (mode);
1555 rtx to1 = NULL_RTX, from1;
1557 while (data->len >= size)
1559 if (data->reverse)
1560 data->offset -= size;
1562 if (data->to)
1564 if (data->autinc_to)
1565 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1566 data->offset);
1567 else
1568 to1 = adjust_address (data->to, mode, data->offset);
1571 if (data->autinc_from)
1572 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1573 data->offset);
1574 else
1575 from1 = adjust_address (data->from, mode, data->offset);
1577 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1578 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1579 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1580 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1582 if (data->to)
1583 emit_insn ((*genfun) (to1, from1));
1584 else
1586 #ifdef PUSH_ROUNDING
1587 emit_single_push_insn (mode, from1, NULL);
1588 #else
1589 abort ();
1590 #endif
1593 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1594 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1595 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1596 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1598 if (! data->reverse)
1599 data->offset += size;
1601 data->len -= size;
1605 /* Emit code to move a block Y to a block X.
1606 This may be done with string-move instructions,
1607 with multiple scalar move instructions, or with a library call.
1609 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1610 with mode BLKmode.
1611 SIZE is an rtx that says how long they are.
1612 ALIGN is the maximum alignment we can assume they have.
1614 Return the address of the new block, if memcpy is called and returns it,
1615 0 otherwise. */
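/* For example (illustrative): to copy a BLKmode object of TYPE from
   SRC to DST, a caller might write

     emit_block_move (dst, src, GEN_INT (int_size_in_bytes (type)));

   where DST and SRC are BLKmode MEMs; the alignment used is taken
   from MEM_ALIGN of the operands.  */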
 1617 rtx
 1618 emit_block_move (x, y, size)
1619 rtx x, y;
1620 rtx size;
1622 rtx retval = 0;
1623 #ifdef TARGET_MEM_FUNCTIONS
1624 static tree fn;
1625 tree call_expr, arg_list;
1626 #endif
1627 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1629 if (GET_MODE (x) != BLKmode)
1630 abort ();
1632 if (GET_MODE (y) != BLKmode)
1633 abort ();
1635 x = protect_from_queue (x, 1);
1636 y = protect_from_queue (y, 0);
1637 size = protect_from_queue (size, 0);
1639 if (GET_CODE (x) != MEM)
1640 abort ();
1641 if (GET_CODE (y) != MEM)
1642 abort ();
1643 if (size == 0)
1644 abort ();
1646 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1647 move_by_pieces (x, y, INTVAL (size), align);
1648 else
1650 /* Try the most limited insn first, because there's no point
1651 including more than one in the machine description unless
1652 the more limited one has some advantage. */
1654 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1655 enum machine_mode mode;
1657 /* Since this is a move insn, we don't care about volatility. */
1658 volatile_ok = 1;
1660 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1661 mode = GET_MODE_WIDER_MODE (mode))
1663 enum insn_code code = movstr_optab[(int) mode];
1664 insn_operand_predicate_fn pred;
1666 if (code != CODE_FOR_nothing
 1667 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1668 here because if SIZE is less than the mode mask, as it is
1669 returned by the macro, it will definitely be less than the
1670 actual mode mask. */
1671 && ((GET_CODE (size) == CONST_INT
1672 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1673 <= (GET_MODE_MASK (mode) >> 1)))
1674 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1675 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1676 || (*pred) (x, BLKmode))
1677 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1678 || (*pred) (y, BLKmode))
1679 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1680 || (*pred) (opalign, VOIDmode)))
1682 rtx op2;
1683 rtx last = get_last_insn ();
1684 rtx pat;
1686 op2 = convert_to_mode (mode, size, 1);
1687 pred = insn_data[(int) code].operand[2].predicate;
1688 if (pred != 0 && ! (*pred) (op2, mode))
1689 op2 = copy_to_mode_reg (mode, op2);
1691 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1692 if (pat)
1694 emit_insn (pat);
1695 volatile_ok = 0;
1696 return 0;
1698 else
1699 delete_insns_since (last);
1703 volatile_ok = 0;
1705 /* X, Y, or SIZE may have been passed through protect_from_queue.
1707 It is unsafe to save the value generated by protect_from_queue
1708 and reuse it later. Consider what happens if emit_queue is
1709 called before the return value from protect_from_queue is used.
1711 Expansion of the CALL_EXPR below will call emit_queue before
1712 we are finished emitting RTL for argument setup. So if we are
1713 not careful we could get the wrong value for an argument.
1715 To avoid this problem we go ahead and emit code to copy X, Y &
1716 SIZE into new pseudos. We can then place those new pseudos
1717 into an RTL_EXPR and use them later, even after a call to
1718 emit_queue.
1720 Note this is not strictly needed for library calls since they
1721 do not call emit_queue before loading their arguments. However,
1722 we may need to have library calls call emit_queue in the future
1723 since failing to do so could cause problems for targets which
1724 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1725 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1726 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1728 #ifdef TARGET_MEM_FUNCTIONS
1729 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1730 #else
1731 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1732 TREE_UNSIGNED (integer_type_node));
1733 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1734 #endif
1736 #ifdef TARGET_MEM_FUNCTIONS
1737 /* It is incorrect to use the libcall calling conventions to call
1738 memcpy in this context.
1740 This could be a user call to memcpy and the user may wish to
1741 examine the return value from memcpy.
1743 For targets where libcalls and normal calls have different conventions
1744 for returning pointers, we could end up generating incorrect code.
1746 So instead of using a libcall sequence we build up a suitable
1747 CALL_EXPR and expand the call in the normal fashion. */
1748 if (fn == NULL_TREE)
1750 tree fntype;
1752 /* This was copied from except.c, I don't know if all this is
1753 necessary in this context or not. */
1754 fn = get_identifier ("memcpy");
1755 fntype = build_pointer_type (void_type_node);
1756 fntype = build_function_type (fntype, NULL_TREE);
1757 fn = build_decl (FUNCTION_DECL, fn, fntype);
1758 ggc_add_tree_root (&fn, 1);
1759 DECL_EXTERNAL (fn) = 1;
1760 TREE_PUBLIC (fn) = 1;
1761 DECL_ARTIFICIAL (fn) = 1;
1762 TREE_NOTHROW (fn) = 1;
1763 make_decl_rtl (fn, NULL);
1764 assemble_external (fn);
1767 /* We need to make an argument list for the function call.
1769 memcpy has three arguments, the first two are void * addresses and
1770 the last is a size_t byte count for the copy. */
1771 arg_list
1772 = build_tree_list (NULL_TREE,
1773 make_tree (build_pointer_type (void_type_node), x));
1774 TREE_CHAIN (arg_list)
1775 = build_tree_list (NULL_TREE,
1776 make_tree (build_pointer_type (void_type_node), y));
1777 TREE_CHAIN (TREE_CHAIN (arg_list))
1778 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1779 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1781 /* Now we have to build up the CALL_EXPR itself. */
1782 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1783 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1784 call_expr, arg_list, NULL_TREE);
1785 TREE_SIDE_EFFECTS (call_expr) = 1;
1787 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1788 #else
1789 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1790 VOIDmode, 3, y, Pmode, x, Pmode,
1791 convert_to_mode (TYPE_MODE (integer_type_node), size,
1792 TREE_UNSIGNED (integer_type_node)),
1793 TYPE_MODE (integer_type_node));
1794 #endif
1796 /* If we are initializing a readonly value, show the above call
1797 clobbered it. Otherwise, a load from it may erroneously be hoisted
1798 from a loop. */
1799 if (RTX_UNCHANGING_P (x))
1800 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1803 return retval;
1806 /* Copy all or part of a value X into registers starting at REGNO.
1807 The number of registers to be filled is NREGS. */
1809 void
1810 move_block_to_reg (regno, x, nregs, mode)
1811 int regno;
1812 rtx x;
1813 int nregs;
1814 enum machine_mode mode;
1816 int i;
1817 #ifdef HAVE_load_multiple
1818 rtx pat;
1819 rtx last;
1820 #endif
1822 if (nregs == 0)
1823 return;
1825 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1826 x = validize_mem (force_const_mem (mode, x));
1828 /* See if the machine can do this with a load multiple insn. */
1829 #ifdef HAVE_load_multiple
1830 if (HAVE_load_multiple)
1832 last = get_last_insn ();
1833 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1834 GEN_INT (nregs));
1835 if (pat)
1837 emit_insn (pat);
1838 return;
1840 else
1841 delete_insns_since (last);
1843 #endif
1845 for (i = 0; i < nregs; i++)
1846 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1847 operand_subword_force (x, i, mode));
1850 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1851 The number of registers to be filled is NREGS. SIZE indicates the number
1852 of bytes in the object X. */
1854 void
1855 move_block_from_reg (regno, x, nregs, size)
1856 int regno;
1857 rtx x;
1858 int nregs;
1859 int size;
1861 int i;
1862 #ifdef HAVE_store_multiple
1863 rtx pat;
1864 rtx last;
1865 #endif
1866 enum machine_mode mode;
1868 if (nregs == 0)
1869 return;
1871 /* If SIZE is that of a mode no bigger than a word, just use that
1872 mode's store operation. */
1873 if (size <= UNITS_PER_WORD
1874 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1875 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1877 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1878 return;
1881 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1882 to the left before storing to memory. Note that the previous test
1883 doesn't handle all cases (e.g. SIZE == 3). */
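  /* For instance (illustrative), with UNITS_PER_WORD == 4 and SIZE == 3
     the register is shifted left by 8 bits, so the three significant
     bytes land at the low-order memory addresses when the word is
     stored.  */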
1884 if (size < UNITS_PER_WORD
1885 && BYTES_BIG_ENDIAN
1886 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1888 rtx tem = operand_subword (x, 0, 1, BLKmode);
1889 rtx shift;
1891 if (tem == 0)
1892 abort ();
1894 shift = expand_shift (LSHIFT_EXPR, word_mode,
1895 gen_rtx_REG (word_mode, regno),
1896 build_int_2 ((UNITS_PER_WORD - size)
1897 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1898 emit_move_insn (tem, shift);
1899 return;
1902 /* See if the machine can do this with a store multiple insn. */
1903 #ifdef HAVE_store_multiple
1904 if (HAVE_store_multiple)
1906 last = get_last_insn ();
1907 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1908 GEN_INT (nregs));
1909 if (pat)
1911 emit_insn (pat);
1912 return;
1914 else
1915 delete_insns_since (last);
1917 #endif
1919 for (i = 0; i < nregs; i++)
1921 rtx tem = operand_subword (x, i, 1, BLKmode);
1923 if (tem == 0)
1924 abort ();
1926 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1930 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1931 registers represented by a PARALLEL. SSIZE represents the total size of
1932 block SRC in bytes, or -1 if not known. */
1933 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1934 the balance will be in what would be the low-order memory addresses, i.e.
1935 left justified for big endian, right justified for little endian. This
1936 happens to be true for the targets currently using this support. If this
1937 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1938 would be needed. */
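/* For example (illustrative): a DST describing a value passed half in
   register 3 and half in register 4 might look like

     (parallel [(expr_list (reg:SI 3) (const_int 0))
                (expr_list (reg:SI 4) (const_int 4))])

   where each const_int gives the byte offset of that piece within the
   BLKmode value.  */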
1940 void
1941 emit_group_load (dst, orig_src, ssize)
1942 rtx dst, orig_src;
1943 int ssize;
1945 rtx *tmps, src;
1946 int start, i;
1948 if (GET_CODE (dst) != PARALLEL)
1949 abort ();
1951 /* Check for a NULL entry, used to indicate that the parameter goes
1952 both on the stack and in registers. */
1953 if (XEXP (XVECEXP (dst, 0, 0), 0))
1954 start = 0;
1955 else
1956 start = 1;
1958 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1960 /* Process the pieces. */
1961 for (i = start; i < XVECLEN (dst, 0); i++)
1963 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1964 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1965 unsigned int bytelen = GET_MODE_SIZE (mode);
1966 int shift = 0;
1968 /* Handle trailing fragments that run over the size of the struct. */
1969 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1971 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1972 bytelen = ssize - bytepos;
1973 if (bytelen <= 0)
1974 abort ();
1977 /* If we won't be loading directly from memory, protect the real source
1978 from strange tricks we might play; but make sure that the source can
1979 be loaded directly into the destination. */
1980 src = orig_src;
1981 if (GET_CODE (orig_src) != MEM
1982 && (!CONSTANT_P (orig_src)
1983 || (GET_MODE (orig_src) != mode
1984 && GET_MODE (orig_src) != VOIDmode)))
1986 if (GET_MODE (orig_src) == VOIDmode)
1987 src = gen_reg_rtx (mode);
1988 else
1989 src = gen_reg_rtx (GET_MODE (orig_src));
1991 emit_move_insn (src, orig_src);
1994 /* Optimize the access just a bit. */
1995 if (GET_CODE (src) == MEM
1996 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
1997 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1998 && bytelen == GET_MODE_SIZE (mode))
2000 tmps[i] = gen_reg_rtx (mode);
2001 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2003 else if (GET_CODE (src) == CONCAT)
2005 if (bytepos == 0
2006 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2007 tmps[i] = XEXP (src, 0);
2008 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2009 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2010 tmps[i] = XEXP (src, 1);
2011 else if (bytepos == 0)
2013 rtx mem = assign_stack_temp (GET_MODE (src),
2014 GET_MODE_SIZE (GET_MODE (src)), 0);
2015 emit_move_insn (mem, src);
2016 tmps[i] = adjust_address (mem, mode, 0);
2018 else
2019 abort ();
2021 else if (CONSTANT_P (src)
2022 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2023 tmps[i] = src;
2024 else
2025 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2026 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2027 mode, mode, ssize);
2029 if (BYTES_BIG_ENDIAN && shift)
2030 tmps[i] = expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2031 tmps[i], 0, OPTAB_WIDEN);
2034 emit_queue ();
2036 /* Copy the extracted pieces into the proper (probable) hard regs. */
2037 for (i = start; i < XVECLEN (dst, 0); i++)
2038 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2041 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2042 registers represented by a PARALLEL. SSIZE represents the total size of
2043 block DST, or -1 if not known. */
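/* This is the inverse of emit_group_load: here SRC is the PARALLEL and DST
   is ordinary storage.  The pieces are first copied out of the (probable)
   hard registers into pseudos, so the stores below cannot clobber a
   register that still holds an unread piece.  */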
2045 void
2046 emit_group_store (orig_dst, src, ssize)
2047 rtx orig_dst, src;
2048 int ssize;
2050 rtx *tmps, dst;
2051 int start, i;
2053 if (GET_CODE (src) != PARALLEL)
2054 abort ();
2056 /* Check for a NULL entry, used to indicate that the parameter goes
2057 both on the stack and in registers. */
2058 if (XEXP (XVECEXP (src, 0, 0), 0))
2059 start = 0;
2060 else
2061 start = 1;
2063 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2065 /* Copy the (probable) hard regs into pseudos. */
2066 for (i = start; i < XVECLEN (src, 0); i++)
2068 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2069 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2070 emit_move_insn (tmps[i], reg);
2072 emit_queue ();
2074 /* If we won't be storing directly into memory, protect the real destination
2075 from strange tricks we might play. */
2076 dst = orig_dst;
2077 if (GET_CODE (dst) == PARALLEL)
2079 rtx temp;
2081 /* We can get a PARALLEL dst if there is a conditional expression in
2082 a return statement. In that case, the dst and src are the same,
2083 so no action is necessary. */
2084 if (rtx_equal_p (dst, src))
2085 return;
2087 /* It is unclear if we can ever reach here, but we may as well handle
2088 it. Allocate a temporary, and split this into a store/load to/from
2089 the temporary. */
2091 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2092 emit_group_store (temp, src, ssize);
2093 emit_group_load (dst, temp, ssize);
2094 return;
2096 else if (GET_CODE (dst) != MEM)
2098 dst = gen_reg_rtx (GET_MODE (orig_dst));
2099 /* Make life a bit easier for combine. */
2100 emit_move_insn (dst, const0_rtx);
2103 /* Process the pieces. */
2104 for (i = start; i < XVECLEN (src, 0); i++)
2106 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2107 enum machine_mode mode = GET_MODE (tmps[i]);
2108 unsigned int bytelen = GET_MODE_SIZE (mode);
2110 /* Handle trailing fragments that run over the size of the struct. */
2111 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2113 if (BYTES_BIG_ENDIAN)
2115 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2116 tmps[i] = expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2117 tmps[i], 0, OPTAB_WIDEN);
2119 bytelen = ssize - bytepos;
2122 /* Optimize the access just a bit. */
2123 if (GET_CODE (dst) == MEM
2124 && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
2125 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2126 && bytelen == GET_MODE_SIZE (mode))
2127 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2128 else
2129 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2130 mode, tmps[i], ssize);
2133 emit_queue ();
2135 /* Copy from the pseudo into the (probable) hard reg. */
2136 if (GET_CODE (dst) == REG)
2137 emit_move_insn (orig_dst, dst);
2140 /* Generate code to copy a BLKmode object of TYPE out of a
2141 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2142 is null, a stack temporary is created. TGTBLK is returned.
2144 The primary purpose of this routine is to handle functions
2145 that return BLKmode structures in registers. Some machines
2146 (the PA for example) want to return all small structures
2147 in registers regardless of the structure's alignment. */
2149 rtx
2150 copy_blkmode_from_reg (tgtblk, srcreg, type)
2151 rtx tgtblk;
2152 rtx srcreg;
2153 tree type;
2155 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2156 rtx src = NULL, dst = NULL;
2157 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2158 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2160 if (tgtblk == 0)
2162 tgtblk = assign_temp (build_qualified_type (type,
2163 (TYPE_QUALS (type)
2164 | TYPE_QUAL_CONST)),
2165 0, 1, 1);
2166 preserve_temp_slots (tgtblk);
2169 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2170 into a new pseudo which is a full word.
2172 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2173 the wrong part of the register gets copied so we fake a type conversion
2174 in place. */
2175 if (GET_MODE (srcreg) != BLKmode
2176 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2178 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2179 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2180 else
2181 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2184 /* Structures whose size is not a multiple of a word are aligned
2185 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2186 machine, this means we must skip the empty high order bytes when
2187 calculating the bit offset. */
2188 if (BYTES_BIG_ENDIAN
2189 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2190 && bytes % UNITS_PER_WORD)
2191 big_endian_correction
2192 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
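/* Purely as an illustration: with 32-bit words, a 6-byte structure gives
   bytes % UNITS_PER_WORD == 2, so the correction is 32 - 2 * 8 = 16 bits,
   i.e. the first 16 bits of the source are skipped as empty high-order
   padding.  */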
2194 /* Copy the structure BITSIZE bits at a time.
2196 We could probably emit more efficient code for machines which do not use
2197 strict alignment, but it doesn't seem worth the effort at the current
2198 time. */
2199 for (bitpos = 0, xbitpos = big_endian_correction;
2200 bitpos < bytes * BITS_PER_UNIT;
2201 bitpos += bitsize, xbitpos += bitsize)
2203 /* We need a new source operand each time xbitpos is on a
2204 word boundary and when xbitpos == big_endian_correction
2205 (the first time through). */
2206 if (xbitpos % BITS_PER_WORD == 0
2207 || xbitpos == big_endian_correction)
2208 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2209 GET_MODE (srcreg));
2211 /* We need a new destination operand each time bitpos is on
2212 a word boundary. */
2213 if (bitpos % BITS_PER_WORD == 0)
2214 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2216 /* Use xbitpos for the source extraction (right justified) and
2217 bitpos for the destination store (left justified). */
2218 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2219 extract_bit_field (src, bitsize,
2220 xbitpos % BITS_PER_WORD, 1,
2221 NULL_RTX, word_mode, word_mode,
2222 BITS_PER_WORD),
2223 BITS_PER_WORD);
2226 return tgtblk;
2229 /* Add a USE expression for REG to the (possibly empty) list pointed
2230 to by CALL_FUSAGE. REG must denote a hard register. */
2232 void
2233 use_reg (call_fusage, reg)
2234 rtx *call_fusage, reg;
2236 if (GET_CODE (reg) != REG
2237 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2238 abort ();
2240 *call_fusage
2241 = gen_rtx_EXPR_LIST (VOIDmode,
2242 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2245 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2246 starting at REGNO. All of these registers must be hard registers. */
2248 void
2249 use_regs (call_fusage, regno, nregs)
2250 rtx *call_fusage;
2251 int regno;
2252 int nregs;
2254 int i;
2256 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2257 abort ();
2259 for (i = 0; i < nregs; i++)
2260 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2263 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2264 PARALLEL REGS. This is for calls that pass values in multiple
2265 non-contiguous locations. The Irix 6 ABI has examples of this. */
2267 void
2268 use_group_regs (call_fusage, regs)
2269 rtx *call_fusage;
2270 rtx regs;
2272 int i;
2274 for (i = 0; i < XVECLEN (regs, 0); i++)
2276 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2278 /* A NULL entry means the parameter goes both on the stack and in
2279 registers. This can also be a MEM for targets that pass values
2280 partially on the stack and partially in registers. */
2281 if (reg != 0 && GET_CODE (reg) == REG)
2282 use_reg (call_fusage, reg);
2288 can_store_by_pieces (len, constfun, constfundata, align)
2289 unsigned HOST_WIDE_INT len;
2290 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2291 PTR constfundata;
2292 unsigned int align;
2294 unsigned HOST_WIDE_INT max_size, l;
2295 HOST_WIDE_INT offset = 0;
2296 enum machine_mode mode, tmode;
2297 enum insn_code icode;
2298 int reverse;
2299 rtx cst;
2301 if (! MOVE_BY_PIECES_P (len, align))
2302 return 0;
2304 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2305 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2306 align = MOVE_MAX * BITS_PER_UNIT;
2308 /* We would first store what we can in the largest integer mode, then go to
2309 successively smaller modes. */
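/* This loop mirrors the mode-selection logic of store_by_pieces_1 below but
   emits nothing: it only checks that every constant CONSTFUN would produce
   is a legitimate operand, so callers can test feasibility before
   committing to store_by_pieces.  */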
2311 for (reverse = 0;
2312 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2313 reverse++)
2315 l = len;
2316 mode = VOIDmode;
2317 max_size = MOVE_MAX_PIECES + 1;
2318 while (max_size > 1)
2320 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2321 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2322 if (GET_MODE_SIZE (tmode) < max_size)
2323 mode = tmode;
2325 if (mode == VOIDmode)
2326 break;
2328 icode = mov_optab->handlers[(int) mode].insn_code;
2329 if (icode != CODE_FOR_nothing
2330 && align >= GET_MODE_ALIGNMENT (mode))
2332 unsigned int size = GET_MODE_SIZE (mode);
2334 while (l >= size)
2336 if (reverse)
2337 offset -= size;
2339 cst = (*constfun) (constfundata, offset, mode);
2340 if (!LEGITIMATE_CONSTANT_P (cst))
2341 return 0;
2343 if (!reverse)
2344 offset += size;
2346 l -= size;
2350 max_size = GET_MODE_SIZE (mode);
2353 /* The code above should have handled everything. */
2354 if (l != 0)
2355 abort ();
2358 return 1;
2361 /* Generate several move instructions to store LEN bytes generated by
2362 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2363 pointer which will be passed as argument in every CONSTFUN call.
2364 ALIGN is maximum alignment we can assume. */
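/* The simplest possible CONSTFUN is clear_by_pieces_1 below, which ignores
   its arguments and returns const0_rtx; a caller storing a known string
   would instead return the MODE-sized piece of the string found at each
   OFFSET.  */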
2366 void
2367 store_by_pieces (to, len, constfun, constfundata, align)
2368 rtx to;
2369 unsigned HOST_WIDE_INT len;
2370 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2371 PTR constfundata;
2372 unsigned int align;
2374 struct store_by_pieces data;
2376 if (! MOVE_BY_PIECES_P (len, align))
2377 abort ();
2378 to = protect_from_queue (to, 1);
2379 data.constfun = constfun;
2380 data.constfundata = constfundata;
2381 data.len = len;
2382 data.to = to;
2383 store_by_pieces_1 (&data, align);
2386 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2387 rtx with BLKmode). The caller must pass TO through protect_from_queue
2388 before calling. ALIGN is maximum alignment we can assume. */
2390 static void
2391 clear_by_pieces (to, len, align)
2392 rtx to;
2393 unsigned HOST_WIDE_INT len;
2394 unsigned int align;
2396 struct store_by_pieces data;
2398 data.constfun = clear_by_pieces_1;
2399 data.constfundata = NULL;
2400 data.len = len;
2401 data.to = to;
2402 store_by_pieces_1 (&data, align);
2405 /* Callback routine for clear_by_pieces.
2406 Return const0_rtx unconditionally. */
2408 static rtx
2409 clear_by_pieces_1 (data, offset, mode)
2410 PTR data ATTRIBUTE_UNUSED;
2411 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2412 enum machine_mode mode ATTRIBUTE_UNUSED;
2414 return const0_rtx;
2417 /* Subroutine of clear_by_pieces and store_by_pieces.
2418 Generate several move instructions to store LEN bytes of block TO. (A MEM
2419 rtx with BLKmode). The caller must pass TO through protect_from_queue
2420 before calling. ALIGN is maximum alignment we can assume. */
2422 static void
2423 store_by_pieces_1 (data, align)
2424 struct store_by_pieces *data;
2425 unsigned int align;
2427 rtx to_addr = XEXP (data->to, 0);
2428 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2429 enum machine_mode mode = VOIDmode, tmode;
2430 enum insn_code icode;
2432 data->offset = 0;
2433 data->to_addr = to_addr;
2434 data->autinc_to
2435 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2436 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2438 data->explicit_inc_to = 0;
2439 data->reverse
2440 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2441 if (data->reverse)
2442 data->offset = data->len;
2444 /* If storing requires more than two move insns,
2445 copy addresses to registers (to make displacements shorter)
2446 and use post-increment if available. */
2447 if (!data->autinc_to
2448 && move_by_pieces_ninsns (data->len, align) > 2)
2450 /* Determine the main mode we'll be using. */
2451 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2452 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2453 if (GET_MODE_SIZE (tmode) < max_size)
2454 mode = tmode;
2456 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2458 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2459 data->autinc_to = 1;
2460 data->explicit_inc_to = -1;
2463 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2464 && ! data->autinc_to)
2466 data->to_addr = copy_addr_to_reg (to_addr);
2467 data->autinc_to = 1;
2468 data->explicit_inc_to = 1;
2471 if ( !data->autinc_to && CONSTANT_P (to_addr))
2472 data->to_addr = copy_addr_to_reg (to_addr);
2475 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2476 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2477 align = MOVE_MAX * BITS_PER_UNIT;
2479 /* First store what we can in the largest integer mode, then go to
2480 successively smaller modes. */
2482 while (max_size > 1)
2484 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2485 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2486 if (GET_MODE_SIZE (tmode) < max_size)
2487 mode = tmode;
2489 if (mode == VOIDmode)
2490 break;
2492 icode = mov_optab->handlers[(int) mode].insn_code;
2493 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2494 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2496 max_size = GET_MODE_SIZE (mode);
2499 /* The code above should have handled everything. */
2500 if (data->len != 0)
2501 abort ();
2504 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2505 with move instructions for mode MODE. GENFUN is the gen_... function
2506 to make a move insn for that mode. DATA has all the other info. */
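/* Roughly speaking, for a pre-decrement target (explicit_inc_to < 0) each
   iteration of the loop below emits an add of -SIZE to the address register
   followed by the store of the constant piece; for post-increment the add
   of SIZE follows the store instead.  */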
2508 static void
2509 store_by_pieces_2 (genfun, mode, data)
2510 rtx (*genfun) PARAMS ((rtx, ...));
2511 enum machine_mode mode;
2512 struct store_by_pieces *data;
2514 unsigned int size = GET_MODE_SIZE (mode);
2515 rtx to1, cst;
2517 while (data->len >= size)
2519 if (data->reverse)
2520 data->offset -= size;
2522 if (data->autinc_to)
2523 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2524 data->offset);
2525 else
2526 to1 = adjust_address (data->to, mode, data->offset);
2528 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2529 emit_insn (gen_add2_insn (data->to_addr,
2530 GEN_INT (-(HOST_WIDE_INT) size)));
2532 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2533 emit_insn ((*genfun) (to1, cst));
2535 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2536 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2538 if (! data->reverse)
2539 data->offset += size;
2541 data->len -= size;
2545 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2546 its length in bytes. */
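/* The strategy below: if OBJECT is a register-sized non-BLKmode value, just
   move a zero into it; for small constant sizes use clear_by_pieces;
   otherwise try the machine's clrstr pattern and finally fall back to a
   call to memset (or bzero when TARGET_MEM_FUNCTIONS is not defined).  */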
2548 rtx
2549 clear_storage (object, size)
2550 rtx object;
2551 rtx size;
2553 #ifdef TARGET_MEM_FUNCTIONS
2554 static tree fn;
2555 tree call_expr, arg_list;
2556 #endif
2557 rtx retval = 0;
2558 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2559 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2561 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2562 just move a zero. Otherwise, do this a piece at a time. */
2563 if (GET_MODE (object) != BLKmode
2564 && GET_CODE (size) == CONST_INT
2565 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2566 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2567 else
2569 object = protect_from_queue (object, 1);
2570 size = protect_from_queue (size, 0);
2572 if (GET_CODE (size) == CONST_INT
2573 && MOVE_BY_PIECES_P (INTVAL (size), align))
2574 clear_by_pieces (object, INTVAL (size), align);
2575 else
2577 /* Try the most limited insn first, because there's no point
2578 including more than one in the machine description unless
2579 the more limited one has some advantage. */
2581 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2582 enum machine_mode mode;
2584 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2585 mode = GET_MODE_WIDER_MODE (mode))
2587 enum insn_code code = clrstr_optab[(int) mode];
2588 insn_operand_predicate_fn pred;
2590 if (code != CODE_FOR_nothing
2591 /* We don't need MODE to be narrower than
2592 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2593 the mode mask, as it is returned by the macro, it will
2594 definitely be less than the actual mode mask. */
2595 && ((GET_CODE (size) == CONST_INT
2596 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2597 <= (GET_MODE_MASK (mode) >> 1)))
2598 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2599 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2600 || (*pred) (object, BLKmode))
2601 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2602 || (*pred) (opalign, VOIDmode)))
2604 rtx op1;
2605 rtx last = get_last_insn ();
2606 rtx pat;
2608 op1 = convert_to_mode (mode, size, 1);
2609 pred = insn_data[(int) code].operand[1].predicate;
2610 if (pred != 0 && ! (*pred) (op1, mode))
2611 op1 = copy_to_mode_reg (mode, op1);
2613 pat = GEN_FCN ((int) code) (object, op1, opalign);
2614 if (pat)
2616 emit_insn (pat);
2617 return 0;
2619 else
2620 delete_insns_since (last);
2624 /* OBJECT or SIZE may have been passed through protect_from_queue.
2626 It is unsafe to save the value generated by protect_from_queue
2627 and reuse it later. Consider what happens if emit_queue is
2628 called before the return value from protect_from_queue is used.
2630 Expansion of the CALL_EXPR below will call emit_queue before
2631 we are finished emitting RTL for argument setup. So if we are
2632 not careful we could get the wrong value for an argument.
2634 To avoid this problem we go ahead and emit code to copy OBJECT
2635 and SIZE into new pseudos. We can then place those new pseudos
2636 into an RTL_EXPR and use them later, even after a call to
2637 emit_queue.
2639 Note this is not strictly needed for library calls since they
2640 do not call emit_queue before loading their arguments. However,
2641 we may need to have library calls call emit_queue in the future
2642 since failing to do so could cause problems for targets which
2643 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2644 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2646 #ifdef TARGET_MEM_FUNCTIONS
2647 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2648 #else
2649 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2650 TREE_UNSIGNED (integer_type_node));
2651 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2652 #endif
2654 #ifdef TARGET_MEM_FUNCTIONS
2655 /* It is incorrect to use the libcall calling conventions to call
2656 memset in this context.
2658 This could be a user call to memset and the user may wish to
2659 examine the return value from memset.
2661 For targets where libcalls and normal calls have different
2662 conventions for returning pointers, we could end up generating
2663 incorrect code.
2665 So instead of using a libcall sequence we build up a suitable
2666 CALL_EXPR and expand the call in the normal fashion. */
2667 if (fn == NULL_TREE)
2669 tree fntype;
2671 /* This was copied from except.c, I don't know if all this is
2672 necessary in this context or not. */
2673 fn = get_identifier ("memset");
2674 fntype = build_pointer_type (void_type_node);
2675 fntype = build_function_type (fntype, NULL_TREE);
2676 fn = build_decl (FUNCTION_DECL, fn, fntype);
2677 ggc_add_tree_root (&fn, 1);
2678 DECL_EXTERNAL (fn) = 1;
2679 TREE_PUBLIC (fn) = 1;
2680 DECL_ARTIFICIAL (fn) = 1;
2681 TREE_NOTHROW (fn) = 1;
2682 make_decl_rtl (fn, NULL);
2683 assemble_external (fn);
2686 /* We need to make an argument list for the function call.
2688 memset has three arguments: the first is a void * address, the
2689 second an integer with the initialization value, and the last is a
2690 size_t count of the bytes to set. */
2691 arg_list
2692 = build_tree_list (NULL_TREE,
2693 make_tree (build_pointer_type (void_type_node),
2694 object));
2695 TREE_CHAIN (arg_list)
2696 = build_tree_list (NULL_TREE,
2697 make_tree (integer_type_node, const0_rtx));
2698 TREE_CHAIN (TREE_CHAIN (arg_list))
2699 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2700 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2702 /* Now we have to build up the CALL_EXPR itself. */
2703 call_expr = build1 (ADDR_EXPR,
2704 build_pointer_type (TREE_TYPE (fn)), fn);
2705 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2706 call_expr, arg_list, NULL_TREE);
2707 TREE_SIDE_EFFECTS (call_expr) = 1;
2709 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2710 #else
2711 emit_library_call (bzero_libfunc, LCT_NORMAL,
2712 VOIDmode, 2, object, Pmode, size,
2713 TYPE_MODE (integer_type_node));
2714 #endif
2716 /* If we are initializing a readonly value, show the above call
2717 clobbered it. Otherwise, a load from it may erroneously be
2718 hoisted from a loop. */
2719 if (RTX_UNCHANGING_P (object))
2720 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2724 return retval;
2727 /* Generate code to copy Y into X.
2728 Both Y and X must have the same mode, except that
2729 Y can be a constant with VOIDmode.
2730 This mode cannot be BLKmode; use emit_block_move for that.
2732 Return the last instruction emitted. */
2734 rtx
2735 emit_move_insn (x, y)
2736 rtx x, y;
2738 enum machine_mode mode = GET_MODE (x);
2739 rtx y_cst = NULL_RTX;
2740 rtx last_insn;
2742 x = protect_from_queue (x, 1);
2743 y = protect_from_queue (y, 0);
2745 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2746 abort ();
2748 /* Never force constant_p_rtx to memory. */
2749 if (GET_CODE (y) == CONSTANT_P_RTX)
2751 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2753 y_cst = y;
2754 y = force_const_mem (mode, y);
2757 /* If X or Y are memory references, verify that their addresses are valid
2758 for the machine. */
2759 if (GET_CODE (x) == MEM
2760 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2761 && ! push_operand (x, GET_MODE (x)))
2762 || (flag_force_addr
2763 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2764 x = validize_mem (x);
2766 if (GET_CODE (y) == MEM
2767 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2768 || (flag_force_addr
2769 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2770 y = validize_mem (y);
2772 if (mode == BLKmode)
2773 abort ();
2775 last_insn = emit_move_insn_1 (x, y);
2777 if (y_cst && GET_CODE (x) == REG)
2778 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2780 return last_insn;
2783 /* Low level part of emit_move_insn.
2784 Called just like emit_move_insn, but assumes X and Y
2785 are basically valid. */
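/* The dispatch below: use the mode's own move pattern when one exists;
   otherwise split complex modes into real and imaginary parts, and split
   any remaining multi-word mode into word-sized moves, with special care
   for pushes onto the stack and for operands still being reloaded.  */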
2787 rtx
2788 emit_move_insn_1 (x, y)
2789 rtx x, y;
2791 enum machine_mode mode = GET_MODE (x);
2792 enum machine_mode submode;
2793 enum mode_class class = GET_MODE_CLASS (mode);
2794 unsigned int i;
2796 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2797 abort ();
2799 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2800 return
2801 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2803 /* Expand complex moves by moving real part and imag part, if possible. */
2804 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2805 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2806 * BITS_PER_UNIT),
2807 (class == MODE_COMPLEX_INT
2808 ? MODE_INT : MODE_FLOAT),
2810 && (mov_optab->handlers[(int) submode].insn_code
2811 != CODE_FOR_nothing))
2813 /* Don't split destination if it is a stack push. */
2814 int stack = push_operand (x, GET_MODE (x));
2816 #ifdef PUSH_ROUNDING
2817 /* In case we output to the stack, but the size is smaller than what the
2818 machine can push exactly, we need to use move instructions. */
2819 if (stack
2820 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2822 rtx temp;
2823 int offset1, offset2;
2825 /* Do not use anti_adjust_stack, since we don't want to update
2826 stack_pointer_delta. */
2827 temp = expand_binop (Pmode,
2828 #ifdef STACK_GROWS_DOWNWARD
2829 sub_optab,
2830 #else
2831 add_optab,
2832 #endif
2833 stack_pointer_rtx,
2834 GEN_INT
2835 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2836 stack_pointer_rtx,
2838 OPTAB_LIB_WIDEN);
2839 if (temp != stack_pointer_rtx)
2840 emit_move_insn (stack_pointer_rtx, temp);
2841 #ifdef STACK_GROWS_DOWNWARD
2842 offset1 = 0;
2843 offset2 = GET_MODE_SIZE (submode);
2844 #else
2845 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2846 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2847 + GET_MODE_SIZE (submode));
2848 #endif
2849 emit_move_insn (change_address (x, submode,
2850 gen_rtx_PLUS (Pmode,
2851 stack_pointer_rtx,
2852 GEN_INT (offset1))),
2853 gen_realpart (submode, y));
2854 emit_move_insn (change_address (x, submode,
2855 gen_rtx_PLUS (Pmode,
2856 stack_pointer_rtx,
2857 GEN_INT (offset2))),
2858 gen_imagpart (submode, y));
2860 else
2861 #endif
2862 /* If this is a stack, push the highpart first, so it
2863 will be in the argument order.
2865 In that case, change_address is used only to convert
2866 the mode, not to change the address. */
2867 if (stack)
2869 /* Note that the real part always precedes the imag part in memory
2870 regardless of machine's endianness. */
2871 #ifdef STACK_GROWS_DOWNWARD
2872 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2873 (gen_rtx_MEM (submode, XEXP (x, 0)),
2874 gen_imagpart (submode, y)));
2875 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2876 (gen_rtx_MEM (submode, XEXP (x, 0)),
2877 gen_realpart (submode, y)));
2878 #else
2879 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2880 (gen_rtx_MEM (submode, XEXP (x, 0)),
2881 gen_realpart (submode, y)));
2882 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2883 (gen_rtx_MEM (submode, XEXP (x, 0)),
2884 gen_imagpart (submode, y)));
2885 #endif
2887 else
2889 rtx realpart_x, realpart_y;
2890 rtx imagpart_x, imagpart_y;
2892 /* If this is a complex value with each part being smaller than a
2893 word, the usual calling sequence will likely pack the pieces into
2894 a single register. Unfortunately, SUBREG of hard registers only
2895 deals in terms of words, so we have a problem converting input
2896 arguments to the CONCAT of two registers that is used elsewhere
2897 for complex values. If this is before reload, we can copy it into
2898 memory and reload. FIXME, we should see about using extract and
2899 insert on integer registers, but complex short and complex char
2900 variables should be rarely used. */
2901 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2902 && (reload_in_progress | reload_completed) == 0)
2904 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2905 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2907 if (packed_dest_p || packed_src_p)
2909 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2910 ? MODE_FLOAT : MODE_INT);
2912 enum machine_mode reg_mode
2913 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2915 if (reg_mode != BLKmode)
2917 rtx mem = assign_stack_temp (reg_mode,
2918 GET_MODE_SIZE (mode), 0);
2919 rtx cmem = adjust_address (mem, mode, 0);
2921 cfun->cannot_inline
2922 = N_("function using short complex types cannot be inline");
2924 if (packed_dest_p)
2926 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2927 emit_move_insn_1 (cmem, y);
2928 return emit_move_insn_1 (sreg, mem);
2930 else
2932 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2933 emit_move_insn_1 (mem, sreg);
2934 return emit_move_insn_1 (x, cmem);
2940 realpart_x = gen_realpart (submode, x);
2941 realpart_y = gen_realpart (submode, y);
2942 imagpart_x = gen_imagpart (submode, x);
2943 imagpart_y = gen_imagpart (submode, y);
2945 /* Show the output dies here. This is necessary for SUBREGs
2946 of pseudos since we cannot track their lifetimes correctly;
2947 hard regs shouldn't appear here except as return values.
2948 We never want to emit such a clobber after reload. */
2949 if (x != y
2950 && ! (reload_in_progress || reload_completed)
2951 && (GET_CODE (realpart_x) == SUBREG
2952 || GET_CODE (imagpart_x) == SUBREG))
2954 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2957 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2958 (realpart_x, realpart_y));
2959 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2960 (imagpart_x, imagpart_y));
2963 return get_last_insn ();
2966 /* This will handle any multi-word mode that lacks a move_insn pattern.
2967 However, you will get better code if you define such patterns,
2968 even if they must turn into multiple assembler instructions. */
2969 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2971 rtx last_insn = 0;
2972 rtx seq, inner;
2973 int need_clobber;
2975 #ifdef PUSH_ROUNDING
2977 /* If X is a push on the stack, do the push now and replace
2978 X with a reference to the stack pointer. */
2979 if (push_operand (x, GET_MODE (x)))
2981 rtx temp;
2982 enum rtx_code code;
2984 /* Do not use anti_adjust_stack, since we don't want to update
2985 stack_pointer_delta. */
2986 temp = expand_binop (Pmode,
2987 #ifdef STACK_GROWS_DOWNWARD
2988 sub_optab,
2989 #else
2990 add_optab,
2991 #endif
2992 stack_pointer_rtx,
2993 GEN_INT
2994 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2995 stack_pointer_rtx,
2997 OPTAB_LIB_WIDEN);
2998 if (temp != stack_pointer_rtx)
2999 emit_move_insn (stack_pointer_rtx, temp);
3001 code = GET_CODE (XEXP (x, 0));
3002 /* Just hope that small offsets off SP are OK. */
3003 if (code == POST_INC)
3004 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3005 GEN_INT (-(HOST_WIDE_INT)
3006 GET_MODE_SIZE (GET_MODE (x))));
3007 else if (code == POST_DEC)
3008 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3009 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3010 else
3011 temp = stack_pointer_rtx;
3013 x = change_address (x, VOIDmode, temp);
3015 #endif
3017 /* If we are in reload, see if either operand is a MEM whose address
3018 is scheduled for replacement. */
3019 if (reload_in_progress && GET_CODE (x) == MEM
3020 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3021 x = replace_equiv_address_nv (x, inner);
3022 if (reload_in_progress && GET_CODE (y) == MEM
3023 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3024 y = replace_equiv_address_nv (y, inner);
3026 start_sequence ();
3028 need_clobber = 0;
3029 for (i = 0;
3030 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3031 i++)
3033 rtx xpart = operand_subword (x, i, 1, mode);
3034 rtx ypart = operand_subword (y, i, 1, mode);
3036 /* If we can't get a part of Y, put Y into memory if it is a
3037 constant. Otherwise, force it into a register. If we still
3038 can't get a part of Y, abort. */
3039 if (ypart == 0 && CONSTANT_P (y))
3041 y = force_const_mem (mode, y);
3042 ypart = operand_subword (y, i, 1, mode);
3044 else if (ypart == 0)
3045 ypart = operand_subword_force (y, i, mode);
3047 if (xpart == 0 || ypart == 0)
3048 abort ();
3050 need_clobber |= (GET_CODE (xpart) == SUBREG);
3052 last_insn = emit_move_insn (xpart, ypart);
3055 seq = gen_sequence ();
3056 end_sequence ();
3058 /* Show the output dies here. This is necessary for SUBREGs
3059 of pseudos since we cannot track their lifetimes correctly;
3060 hard regs shouldn't appear here except as return values.
3061 We never want to emit such a clobber after reload. */
3062 if (x != y
3063 && ! (reload_in_progress || reload_completed)
3064 && need_clobber != 0)
3066 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3069 emit_insn (seq);
3071 return last_insn;
3073 else
3074 abort ();
3077 /* Pushing data onto the stack. */
3079 /* Push a block of length SIZE (perhaps variable)
3080 and return an rtx to address the beginning of the block.
3081 Note that it is not possible for the value returned to be a QUEUED.
3082 The value may be virtual_outgoing_args_rtx.
3084 EXTRA is the number of bytes of padding to push in addition to SIZE.
3085 BELOW nonzero means this padding comes at low addresses;
3086 otherwise, the padding comes at high addresses. */
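/* For instance, on a machine where the stack grows downward the block is
   allocated by anti_adjust_stack and the address returned is simply
   virtual_outgoing_args_rtx (plus EXTRA when the padding goes below);
   when the stack grows upward the address is instead computed by backing
   up from virtual_outgoing_args_rtx by SIZE bytes.  */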
3088 rtx
3089 push_block (size, extra, below)
3090 rtx size;
3091 int extra, below;
3093 rtx temp;
3095 size = convert_modes (Pmode, ptr_mode, size, 1);
3096 if (CONSTANT_P (size))
3097 anti_adjust_stack (plus_constant (size, extra));
3098 else if (GET_CODE (size) == REG && extra == 0)
3099 anti_adjust_stack (size);
3100 else
3102 temp = copy_to_mode_reg (Pmode, size);
3103 if (extra != 0)
3104 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3105 temp, 0, OPTAB_LIB_WIDEN);
3106 anti_adjust_stack (temp);
3109 #ifndef STACK_GROWS_DOWNWARD
3110 if (0)
3111 #else
3112 if (1)
3113 #endif
3115 temp = virtual_outgoing_args_rtx;
3116 if (extra != 0 && below)
3117 temp = plus_constant (temp, extra);
3119 else
3121 if (GET_CODE (size) == CONST_INT)
3122 temp = plus_constant (virtual_outgoing_args_rtx,
3123 -INTVAL (size) - (below ? 0 : extra));
3124 else if (extra != 0 && !below)
3125 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3126 negate_rtx (Pmode, plus_constant (size, extra)));
3127 else
3128 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3129 negate_rtx (Pmode, size));
3132 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3135 #ifdef PUSH_ROUNDING
3137 /* Emit single push insn. */
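/* When PUSH_ROUNDING pads the value (say, a 1-byte push rounded up to a
   full word), a plain STACK_PUSH_CODE address would adjust the stack by the
   wrong amount, so the code below builds a PRE_MODIFY address that moves
   the stack pointer by the rounded size instead.  */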
3139 static void
3140 emit_single_push_insn (mode, x, type)
3141 rtx x;
3142 enum machine_mode mode;
3143 tree type;
3145 rtx dest_addr;
3146 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3147 rtx dest;
3148 enum insn_code icode;
3149 insn_operand_predicate_fn pred;
3151 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3152 /* If there is a push pattern, use it. Otherwise, fall back to the old way
3153 of handing a MEM representing the push operation to the move expander. */
3154 icode = push_optab->handlers[(int) mode].insn_code;
3155 if (icode != CODE_FOR_nothing)
3157 if (((pred = insn_data[(int) icode].operand[0].predicate)
3158 && !((*pred) (x, mode))))
3159 x = force_reg (mode, x);
3160 emit_insn (GEN_FCN (icode) (x));
3161 return;
3163 if (GET_MODE_SIZE (mode) == rounded_size)
3164 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3165 else
3167 #ifdef STACK_GROWS_DOWNWARD
3168 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3169 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3170 #else
3171 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3172 GEN_INT (rounded_size));
3173 #endif
3174 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3177 dest = gen_rtx_MEM (mode, dest_addr);
3179 if (type != 0)
3181 set_mem_attributes (dest, type, 1);
3183 if (flag_optimize_sibling_calls)
3184 /* Function incoming arguments may overlap with sibling call
3185 outgoing arguments and we cannot allow reordering of reads
3186 from function arguments with stores to outgoing arguments
3187 of sibling calls. */
3188 set_mem_alias_set (dest, 0);
3190 emit_move_insn (dest, x);
3192 #endif
3194 /* Generate code to push X onto the stack, assuming it has mode MODE and
3195 type TYPE.
3196 MODE is redundant except when X is a CONST_INT (since they don't
3197 carry mode info).
3198 SIZE is an rtx for the size of data to be copied (in bytes),
3199 needed only if X is BLKmode.
3201 ALIGN (in bits) is maximum alignment we can assume.
3203 If PARTIAL and REG are both nonzero, then copy that many of the first
3204 words of X into registers starting with REG, and push the rest of X.
3205 The amount of space pushed is decreased by PARTIAL words,
3206 rounded *down* to a multiple of PARM_BOUNDARY.
3207 REG must be a hard register in this case.
3208 If REG is zero but PARTIAL is not, take all other actions for an
3209 argument partially in registers, but do not actually load any
3210 registers.
3212 EXTRA is the amount in bytes of extra space to leave next to this arg.
3213 This is ignored if an argument block has already been allocated.
3215 On a machine that lacks real push insns, ARGS_ADDR is the address of
3216 the bottom of the argument block for this call. We use indexing off there
3217 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3218 argument block has not been preallocated.
3220 ARGS_SO_FAR is the size of args previously pushed for this call.
3222 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3223 for arguments passed in registers. If nonzero, it will be the number
3224 of bytes required. */
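/* Three cases are handled below: a BLKmode argument (copied with push
   insns, move_by_pieces, a movstr pattern, or a library copy), a scalar
   passed partly in registers and partly on the stack, and the common case
   of a scalar pushed or stored whole.  */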
3226 void
3227 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3228 args_addr, args_so_far, reg_parm_stack_space,
3229 alignment_pad)
3230 rtx x;
3231 enum machine_mode mode;
3232 tree type;
3233 rtx size;
3234 unsigned int align;
3235 int partial;
3236 rtx reg;
3237 int extra;
3238 rtx args_addr;
3239 rtx args_so_far;
3240 int reg_parm_stack_space;
3241 rtx alignment_pad;
3243 rtx xinner;
3244 enum direction stack_direction
3245 #ifdef STACK_GROWS_DOWNWARD
3246 = downward;
3247 #else
3248 = upward;
3249 #endif
3251 /* Decide where to pad the argument: `downward' for below,
3252 `upward' for above, or `none' for don't pad it.
3253 Default is below for small data on big-endian machines; else above. */
3254 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3256 /* Invert direction if stack is post-decrement.
3257 FIXME: why? */
3258 if (STACK_PUSH_CODE == POST_DEC)
3259 if (where_pad != none)
3260 where_pad = (where_pad == downward ? upward : downward);
3262 xinner = x = protect_from_queue (x, 0);
3264 if (mode == BLKmode)
3266 /* Copy a block into the stack, entirely or partially. */
3268 rtx temp;
3269 int used = partial * UNITS_PER_WORD;
3270 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3271 int skip;
3273 if (size == 0)
3274 abort ();
3276 used -= offset;
3278 /* USED is now the # of bytes we need not copy to the stack
3279 because registers will take care of them. */
3281 if (partial != 0)
3282 xinner = adjust_address (xinner, BLKmode, used);
3284 /* If the partial register-part of the arg counts in its stack size,
3285 skip the part of stack space corresponding to the registers.
3286 Otherwise, start copying to the beginning of the stack space,
3287 by setting SKIP to 0. */
3288 skip = (reg_parm_stack_space == 0) ? 0 : used;
3290 #ifdef PUSH_ROUNDING
3291 /* Do it with several push insns if that doesn't take lots of insns
3292 and if there is no difficulty with push insns that skip bytes
3293 on the stack for alignment purposes. */
3294 if (args_addr == 0
3295 && PUSH_ARGS
3296 && GET_CODE (size) == CONST_INT
3297 && skip == 0
3298 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3299 /* Here we avoid the case of a structure whose weak alignment
3300 forces many pushes of a small amount of data,
3301 and such small pushes do rounding that causes trouble. */
3302 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3303 || align >= BIGGEST_ALIGNMENT
3304 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3305 == (align / BITS_PER_UNIT)))
3306 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3308 /* Push padding now if padding above and stack grows down,
3309 or if padding below and stack grows up.
3310 But if space already allocated, this has already been done. */
3311 if (extra && args_addr == 0
3312 && where_pad != none && where_pad != stack_direction)
3313 anti_adjust_stack (GEN_INT (extra));
3315 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3317 else
3318 #endif /* PUSH_ROUNDING */
3320 rtx target;
3322 /* Otherwise make space on the stack and copy the data
3323 to the address of that space. */
3325 /* Deduct words put into registers from the size we must copy. */
3326 if (partial != 0)
3328 if (GET_CODE (size) == CONST_INT)
3329 size = GEN_INT (INTVAL (size) - used);
3330 else
3331 size = expand_binop (GET_MODE (size), sub_optab, size,
3332 GEN_INT (used), NULL_RTX, 0,
3333 OPTAB_LIB_WIDEN);
3336 /* Get the address of the stack space.
3337 In this case, we do not deal with EXTRA separately.
3338 A single stack adjust will do. */
3339 if (! args_addr)
3341 temp = push_block (size, extra, where_pad == downward);
3342 extra = 0;
3344 else if (GET_CODE (args_so_far) == CONST_INT)
3345 temp = memory_address (BLKmode,
3346 plus_constant (args_addr,
3347 skip + INTVAL (args_so_far)));
3348 else
3349 temp = memory_address (BLKmode,
3350 plus_constant (gen_rtx_PLUS (Pmode,
3351 args_addr,
3352 args_so_far),
3353 skip));
3354 target = gen_rtx_MEM (BLKmode, temp);
3356 if (type != 0)
3358 set_mem_attributes (target, type, 1);
3359 /* Function incoming arguments may overlap with sibling call
3360 outgoing arguments and we cannot allow reordering of reads
3361 from function arguments with stores to outgoing arguments
3362 of sibling calls. */
3363 set_mem_alias_set (target, 0);
3365 else
3366 set_mem_align (target, align);
3368 /* TEMP is the address of the block. Copy the data there. */
3369 if (GET_CODE (size) == CONST_INT
3370 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3372 move_by_pieces (target, xinner, INTVAL (size), align);
3373 goto ret;
3375 else
3377 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3378 enum machine_mode mode;
3380 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3381 mode != VOIDmode;
3382 mode = GET_MODE_WIDER_MODE (mode))
3384 enum insn_code code = movstr_optab[(int) mode];
3385 insn_operand_predicate_fn pred;
3387 if (code != CODE_FOR_nothing
3388 && ((GET_CODE (size) == CONST_INT
3389 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3390 <= (GET_MODE_MASK (mode) >> 1)))
3391 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3392 && (!(pred = insn_data[(int) code].operand[0].predicate)
3393 || ((*pred) (target, BLKmode)))
3394 && (!(pred = insn_data[(int) code].operand[1].predicate)
3395 || ((*pred) (xinner, BLKmode)))
3396 && (!(pred = insn_data[(int) code].operand[3].predicate)
3397 || ((*pred) (opalign, VOIDmode))))
3399 rtx op2 = convert_to_mode (mode, size, 1);
3400 rtx last = get_last_insn ();
3401 rtx pat;
3403 pred = insn_data[(int) code].operand[2].predicate;
3404 if (pred != 0 && ! (*pred) (op2, mode))
3405 op2 = copy_to_mode_reg (mode, op2);
3407 pat = GEN_FCN ((int) code) (target, xinner,
3408 op2, opalign);
3409 if (pat)
3411 emit_insn (pat);
3412 goto ret;
3414 else
3415 delete_insns_since (last);
3420 if (!ACCUMULATE_OUTGOING_ARGS)
3422 /* If the source is referenced relative to the stack pointer,
3423 copy it to another register to stabilize it. We do not need
3424 to do this if we know that we won't be changing sp. */
3426 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3427 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3428 temp = copy_to_reg (temp);
3431 /* Make inhibit_defer_pop nonzero around the library call
3432 to force it to pop the bcopy-arguments right away. */
3433 NO_DEFER_POP;
3434 #ifdef TARGET_MEM_FUNCTIONS
3435 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3436 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3437 convert_to_mode (TYPE_MODE (sizetype),
3438 size, TREE_UNSIGNED (sizetype)),
3439 TYPE_MODE (sizetype));
3440 #else
3441 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3442 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3443 convert_to_mode (TYPE_MODE (integer_type_node),
3444 size,
3445 TREE_UNSIGNED (integer_type_node)),
3446 TYPE_MODE (integer_type_node));
3447 #endif
3448 OK_DEFER_POP;
3451 else if (partial > 0)
3453 /* Scalar partly in registers. */
3455 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3456 int i;
3457 int not_stack;
3458 /* # words of start of argument
3459 that we must make space for but need not store. */
3460 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3461 int args_offset = INTVAL (args_so_far);
3462 int skip;
3464 /* Push padding now if padding above and stack grows down,
3465 or if padding below and stack grows up.
3466 But if space already allocated, this has already been done. */
3467 if (extra && args_addr == 0
3468 && where_pad != none && where_pad != stack_direction)
3469 anti_adjust_stack (GEN_INT (extra));
3471 /* If we make space by pushing it, we might as well push
3472 the real data. Otherwise, we can leave OFFSET nonzero
3473 and leave the space uninitialized. */
3474 if (args_addr == 0)
3475 offset = 0;
3477 /* Now NOT_STACK gets the number of words that we don't need to
3478 allocate on the stack. */
3479 not_stack = partial - offset;
3481 /* If the partial register-part of the arg counts in its stack size,
3482 skip the part of stack space corresponding to the registers.
3483 Otherwise, start copying to the beginning of the stack space,
3484 by setting SKIP to 0. */
3485 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3487 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3488 x = validize_mem (force_const_mem (mode, x));
3490 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3491 SUBREGs of such registers are not allowed. */
3492 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3493 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3494 x = copy_to_reg (x);
3496 /* Loop over all the words allocated on the stack for this arg. */
3497 /* We can do it by words, because any scalar bigger than a word
3498 has a size a multiple of a word. */
3499 #ifndef PUSH_ARGS_REVERSED
3500 for (i = not_stack; i < size; i++)
3501 #else
3502 for (i = size - 1; i >= not_stack; i--)
3503 #endif
3504 if (i >= not_stack + offset)
3505 emit_push_insn (operand_subword_force (x, i, mode),
3506 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3507 0, args_addr,
3508 GEN_INT (args_offset + ((i - not_stack + skip)
3509 * UNITS_PER_WORD)),
3510 reg_parm_stack_space, alignment_pad);
3512 else
3514 rtx addr;
3515 rtx target = NULL_RTX;
3516 rtx dest;
3518 /* Push padding now if padding above and stack grows down,
3519 or if padding below and stack grows up.
3520 But if space already allocated, this has already been done. */
3521 if (extra && args_addr == 0
3522 && where_pad != none && where_pad != stack_direction)
3523 anti_adjust_stack (GEN_INT (extra));
3525 #ifdef PUSH_ROUNDING
3526 if (args_addr == 0 && PUSH_ARGS)
3527 emit_single_push_insn (mode, x, type);
3528 else
3529 #endif
3531 if (GET_CODE (args_so_far) == CONST_INT)
3532 addr
3533 = memory_address (mode,
3534 plus_constant (args_addr,
3535 INTVAL (args_so_far)));
3536 else
3537 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3538 args_so_far));
3539 target = addr;
3540 dest = gen_rtx_MEM (mode, addr);
3541 if (type != 0)
3543 set_mem_attributes (dest, type, 1);
3544 /* Function incoming arguments may overlap with sibling call
3545 outgoing arguments and we cannot allow reordering of reads
3546 from function arguments with stores to outgoing arguments
3547 of sibling calls. */
3548 set_mem_alias_set (dest, 0);
3551 emit_move_insn (dest, x);
3556 ret:
3557 /* If part should go in registers, copy that part
3558 into the appropriate registers. Do this now, at the end,
3559 since mem-to-mem copies above may do function calls. */
3560 if (partial > 0 && reg != 0)
3562 /* Handle calls that pass values in multiple non-contiguous locations.
3563 The Irix 6 ABI has examples of this. */
3564 if (GET_CODE (reg) == PARALLEL)
3565 emit_group_load (reg, x, -1); /* ??? size? */
3566 else
3567 move_block_to_reg (REGNO (reg), x, partial, mode);
3570 if (extra && args_addr == 0 && where_pad == stack_direction)
3571 anti_adjust_stack (GEN_INT (extra));
3573 if (alignment_pad && args_addr == 0)
3574 anti_adjust_stack (alignment_pad);
3577 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3578 operations. */
3580 static rtx
3581 get_subtarget (x)
3582 rtx x;
3584 return ((x == 0
3585 /* Only registers can be subtargets. */
3586 || GET_CODE (x) != REG
3587 /* If the register is readonly, it can't be set more than once. */
3588 || RTX_UNCHANGING_P (x)
3589 /* Don't use hard regs to avoid extending their life. */
3590 || REGNO (x) < FIRST_PSEUDO_REGISTER
3591 /* Avoid subtargets inside loops,
3592 since they hide some invariant expressions. */
3593 || preserve_subexpressions_p ())
3594 ? 0 : x);
3597 /* Expand an assignment that stores the value of FROM into TO.
3598 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3599 (This may contain a QUEUED rtx;
3600 if the value is constant, this rtx is a constant.)
3601 Otherwise, the returned value is NULL_RTX.
3603 SUGGEST_REG is no longer actually used.
3604 It used to mean, copy the value through a register
3605 and return that register, if that is possible.
3606 We now use WANT_VALUE to decide whether to do this. */
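/* Several cases get special treatment below: component, array and
   bit-field references go through store_field; a function-call rhs is
   expanded before the lhs is computed (see the setjmp note); stores to a
   RESULT_DECL in a return register go through a temporary; an overlapping
   structure-value return is copied with memmove (or bcopy).  Everything
   else falls through to store_expr.  */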
3608 rtx
3609 expand_assignment (to, from, want_value, suggest_reg)
3610 tree to, from;
3611 int want_value;
3612 int suggest_reg ATTRIBUTE_UNUSED;
3614 rtx to_rtx = 0;
3615 rtx result;
3617 /* Don't crash if the lhs of the assignment was erroneous. */
3619 if (TREE_CODE (to) == ERROR_MARK)
3621 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3622 return want_value ? result : NULL_RTX;
3625 /* Assignment of a structure component needs special treatment
3626 if the structure component's rtx is not simply a MEM.
3627 Assignment of an array element at a constant index, and assignment of
3628 an array element in an unaligned packed structure field, has the same
3629 problem. */
3631 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3632 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3634 enum machine_mode mode1;
3635 HOST_WIDE_INT bitsize, bitpos;
3636 rtx orig_to_rtx;
3637 tree offset;
3638 int unsignedp;
3639 int volatilep = 0;
3640 tree tem;
3642 push_temp_slots ();
3643 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3644 &unsignedp, &volatilep);
3646 /* If we are going to use store_bit_field and extract_bit_field,
3647 make sure to_rtx will be safe for multiple use. */
3649 if (mode1 == VOIDmode && want_value)
3650 tem = stabilize_reference (tem);
3652 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3654 if (offset != 0)
3656 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3658 if (GET_CODE (to_rtx) != MEM)
3659 abort ();
3661 if (GET_MODE (offset_rtx) != ptr_mode)
3662 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3664 #ifdef POINTERS_EXTEND_UNSIGNED
3665 if (GET_MODE (offset_rtx) != Pmode)
3666 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3667 #endif
3669 /* A constant address in TO_RTX can have VOIDmode, we must not try
3670 to call force_reg for that case. Avoid that case. */
3671 if (GET_CODE (to_rtx) == MEM
3672 && GET_MODE (to_rtx) == BLKmode
3673 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3674 && bitsize > 0
3675 && (bitpos % bitsize) == 0
3676 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3677 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3679 rtx temp
3680 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3682 if (GET_CODE (XEXP (temp, 0)) == REG)
3683 to_rtx = temp;
3684 else
3685 to_rtx = (replace_equiv_address
3686 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3687 XEXP (temp, 0))));
3688 bitpos = 0;
3691 to_rtx = offset_address (to_rtx, offset_rtx,
3692 highest_pow2_factor (offset));
3695 if (GET_CODE (to_rtx) == MEM)
3697 tree old_expr = MEM_EXPR (to_rtx);
3699 /* If the field is at offset zero, we could have been given the
3700 DECL_RTX of the parent struct. Don't munge it. */
3701 to_rtx = shallow_copy_rtx (to_rtx);
3703 set_mem_attributes (to_rtx, to, 0);
3705 /* If we changed MEM_EXPR, that means we're now referencing
3706 the COMPONENT_REF, which means that MEM_OFFSET must be
3707 relative to that field. But we've not yet reflected BITPOS
3708 in TO_RTX. This will be done in store_field. Adjust for
3709 that by biasing MEM_OFFSET by -bitpos. */
3710 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3711 && (bitpos / BITS_PER_UNIT) != 0)
3712 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3713 - (bitpos / BITS_PER_UNIT)));
3716 /* Deal with volatile and readonly fields. The former is only done
3717 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3718 if (volatilep && GET_CODE (to_rtx) == MEM)
3720 if (to_rtx == orig_to_rtx)
3721 to_rtx = copy_rtx (to_rtx);
3722 MEM_VOLATILE_P (to_rtx) = 1;
3725 if (TREE_CODE (to) == COMPONENT_REF
3726 && TREE_READONLY (TREE_OPERAND (to, 1)))
3728 if (to_rtx == orig_to_rtx)
3729 to_rtx = copy_rtx (to_rtx);
3730 RTX_UNCHANGING_P (to_rtx) = 1;
3733 if (! can_address_p (to))
3735 if (to_rtx == orig_to_rtx)
3736 to_rtx = copy_rtx (to_rtx);
3737 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3740 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3741 (want_value
3742 /* Spurious cast for HPUX compiler. */
3743 ? ((enum machine_mode)
3744 TYPE_MODE (TREE_TYPE (to)))
3745 : VOIDmode),
3746 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3748 preserve_temp_slots (result);
3749 free_temp_slots ();
3750 pop_temp_slots ();
3752 /* If the value is meaningful, convert RESULT to the proper mode.
3753 Otherwise, return nothing. */
3754 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3755 TYPE_MODE (TREE_TYPE (from)),
3756 result,
3757 TREE_UNSIGNED (TREE_TYPE (to)))
3758 : NULL_RTX);
3761 /* If the rhs is a function call and its value is not an aggregate,
3762 call the function before we start to compute the lhs.
3763 This is needed for correct code for cases such as
3764 val = setjmp (buf) on machines where reference to val
3765 requires loading up part of an address in a separate insn.
3767 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3768 since it might be a promoted variable where the zero- or sign- extension
3769 needs to be done. Handling this in the normal way is safe because no
3770 computation is done before the call. */
3771 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3772 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3773 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3774 && GET_CODE (DECL_RTL (to)) == REG))
3776 rtx value;
3778 push_temp_slots ();
3779 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3780 if (to_rtx == 0)
3781 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3783 /* Handle calls that return values in multiple non-contiguous locations.
3784 The Irix 6 ABI has examples of this. */
3785 if (GET_CODE (to_rtx) == PARALLEL)
3786 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3787 else if (GET_MODE (to_rtx) == BLKmode)
3788 emit_block_move (to_rtx, value, expr_size (from));
3789 else
3791 #ifdef POINTERS_EXTEND_UNSIGNED
3792 if (POINTER_TYPE_P (TREE_TYPE (to))
3793 && GET_MODE (to_rtx) != GET_MODE (value))
3794 value = convert_memory_address (GET_MODE (to_rtx), value);
3795 #endif
3796 emit_move_insn (to_rtx, value);
3798 preserve_temp_slots (to_rtx);
3799 free_temp_slots ();
3800 pop_temp_slots ();
3801 return want_value ? to_rtx : NULL_RTX;
3804 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3805 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3807 if (to_rtx == 0)
3808 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3810 /* Don't move directly into a return register. */
3811 if (TREE_CODE (to) == RESULT_DECL
3812 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3814 rtx temp;
3816 push_temp_slots ();
3817 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3819 if (GET_CODE (to_rtx) == PARALLEL)
3820 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3821 else
3822 emit_move_insn (to_rtx, temp);
3824 preserve_temp_slots (to_rtx);
3825 free_temp_slots ();
3826 pop_temp_slots ();
3827 return want_value ? to_rtx : NULL_RTX;
3830 /* In case we are returning the contents of an object which overlaps
3831 the place the value is being stored, use a safe function when copying
3832 a value through a pointer into a structure value return block. */
3833 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3834 && current_function_returns_struct
3835 && !current_function_returns_pcc_struct)
3837 rtx from_rtx, size;
3839 push_temp_slots ();
3840 size = expr_size (from);
3841 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3843 #ifdef TARGET_MEM_FUNCTIONS
3844 emit_library_call (memmove_libfunc, LCT_NORMAL,
3845 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3846 XEXP (from_rtx, 0), Pmode,
3847 convert_to_mode (TYPE_MODE (sizetype),
3848 size, TREE_UNSIGNED (sizetype)),
3849 TYPE_MODE (sizetype));
3850 #else
3851 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3852 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3853 XEXP (to_rtx, 0), Pmode,
3854 convert_to_mode (TYPE_MODE (integer_type_node),
3855 size, TREE_UNSIGNED (integer_type_node)),
3856 TYPE_MODE (integer_type_node));
3857 #endif
3859 preserve_temp_slots (to_rtx);
3860 free_temp_slots ();
3861 pop_temp_slots ();
3862 return want_value ? to_rtx : NULL_RTX;
3865 /* Compute FROM and store the value in the rtx we got. */
3867 push_temp_slots ();
3868 result = store_expr (from, to_rtx, want_value);
3869 preserve_temp_slots (result);
3870 free_temp_slots ();
3871 pop_temp_slots ();
3872 return want_value ? result : NULL_RTX;
3875 /* Generate code for computing expression EXP,
3876 and storing the value into TARGET.
3877 TARGET may contain a QUEUED rtx.
3879 If WANT_VALUE is nonzero, return a copy of the value
3880 not in TARGET, so that we can be sure to use the proper
3881 value in a containing expression even if TARGET has something
3882 else stored in it. If possible, we copy the value through a pseudo
3883 and return that pseudo. Or, if the value is constant, we try to
3884 return the constant. In some cases, we return a pseudo
3885 copied *from* TARGET.
3887 If the mode is BLKmode then we may return TARGET itself.
3888 	   It turns out that in BLKmode it doesn't cause a problem,
3889 because C has no operators that could combine two different
3890 assignments into the same BLKmode object with different values
3891 with no sequence point. Will other languages need this to
3892 be more thorough?
3894 If WANT_VALUE is 0, we return NULL, to make sure
3895 to catch quickly any cases where the caller uses the value
3896 and fails to set WANT_VALUE. */
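/* Editorial illustration (not part of the original comment): in a nested
   assignment such as `x = (y = f ());', the value of the inner assignment
   is needed by the enclosing expression, so the inner store is expanded
   with a nonzero WANT_VALUE and must hand back an rtx that stays valid
   even if TARGET is later overwritten.  */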
3899 store_expr (exp, target, want_value)
3900 tree exp;
3901 rtx target;
3902 int want_value;
3904 rtx temp;
3905 int dont_return_target = 0;
3906 int dont_store_target = 0;
3908 if (TREE_CODE (exp) == COMPOUND_EXPR)
3910 /* Perform first part of compound expression, then assign from second
3911 part. */
3912 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3913 emit_queue ();
3914 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3916 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3918 /* For conditional expression, get safe form of the target. Then
3919 test the condition, doing the appropriate assignment on either
3920 side. This avoids the creation of unnecessary temporaries.
3921 For non-BLKmode, it is more efficient not to do this. */
3923 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3925 emit_queue ();
3926 target = protect_from_queue (target, 1);
3928 do_pending_stack_adjust ();
3929 NO_DEFER_POP;
3930 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3931 start_cleanup_deferral ();
3932 store_expr (TREE_OPERAND (exp, 1), target, 0);
3933 end_cleanup_deferral ();
3934 emit_queue ();
3935 emit_jump_insn (gen_jump (lab2));
3936 emit_barrier ();
3937 emit_label (lab1);
3938 start_cleanup_deferral ();
3939 store_expr (TREE_OPERAND (exp, 2), target, 0);
3940 end_cleanup_deferral ();
3941 emit_queue ();
3942 emit_label (lab2);
3943 OK_DEFER_POP;
3945 return want_value ? target : NULL_RTX;
3947 else if (queued_subexp_p (target))
3948 /* If target contains a postincrement, let's not risk
3949 using it as the place to generate the rhs. */
3951 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3953 /* Expand EXP into a new pseudo. */
3954 temp = gen_reg_rtx (GET_MODE (target));
3955 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3957 else
3958 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3960 /* If target is volatile, ANSI requires accessing the value
3961 *from* the target, if it is accessed. So make that happen.
3962 In no case return the target itself. */
3963 if (! MEM_VOLATILE_P (target) && want_value)
3964 dont_return_target = 1;
3966 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3967 && GET_MODE (target) != BLKmode)
3968 /* If target is in memory and caller wants value in a register instead,
3969 arrange that. Pass TARGET as target for expand_expr so that,
3970 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3971 We know expand_expr will not use the target in that case.
3972 Don't do this if TARGET is volatile because we are supposed
3973 to write it and then read it. */
3975 temp = expand_expr (exp, target, GET_MODE (target), 0);
3976 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3978 /* If TEMP is already in the desired TARGET, only copy it from
3979 memory and don't store it there again. */
3980 if (temp == target
3981 || (rtx_equal_p (temp, target)
3982 && ! side_effects_p (temp) && ! side_effects_p (target)))
3983 dont_store_target = 1;
3984 temp = copy_to_reg (temp);
3986 dont_return_target = 1;
3988 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3989 	  /* If this is a scalar in a register that is stored in a wider mode
3990 than the declared mode, compute the result into its declared mode
3991 and then convert to the wider mode. Our value is the computed
3992 expression. */
3994 /* If we don't want a value, we can do the conversion inside EXP,
3995 which will often result in some optimizations. Do the conversion
3996 in two steps: first change the signedness, if needed, then
3997 the extend. But don't do this if the type of EXP is a subtype
3998 of something else since then the conversion might involve
3999 more than just converting modes. */
4000 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4001 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4003 if (TREE_UNSIGNED (TREE_TYPE (exp))
4004 != SUBREG_PROMOTED_UNSIGNED_P (target))
4006 	    exp = convert
4007 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4008 TREE_TYPE (exp)),
4009 exp);
4011 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4012 SUBREG_PROMOTED_UNSIGNED_P (target)),
4013 exp);
4016 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4018 /* If TEMP is a volatile MEM and we want a result value, make
4019 the access now so it gets done only once. Likewise if
4020 it contains TARGET. */
4021 if (GET_CODE (temp) == MEM && want_value
4022 && (MEM_VOLATILE_P (temp)
4023 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4024 temp = copy_to_reg (temp);
4026 /* If TEMP is a VOIDmode constant, use convert_modes to make
4027 sure that we properly convert it. */
4028 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4030 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4031 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4032 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4033 GET_MODE (target), temp,
4034 SUBREG_PROMOTED_UNSIGNED_P (target));
4037 convert_move (SUBREG_REG (target), temp,
4038 SUBREG_PROMOTED_UNSIGNED_P (target));
4040 /* If we promoted a constant, change the mode back down to match
4041 target. Otherwise, the caller might get confused by a result whose
4042 mode is larger than expected. */
4044 if (want_value && GET_MODE (temp) != GET_MODE (target)
4045 && GET_MODE (temp) != VOIDmode)
4047 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4048 SUBREG_PROMOTED_VAR_P (temp) = 1;
4049 SUBREG_PROMOTED_UNSIGNED_P (temp)
4050 = SUBREG_PROMOTED_UNSIGNED_P (target);
4053 return want_value ? temp : NULL_RTX;
4055 else
4057 temp = expand_expr (exp, target, GET_MODE (target), 0);
4058 /* Return TARGET if it's a specified hardware register.
4059 If TARGET is a volatile mem ref, either return TARGET
4060 or return a reg copied *from* TARGET; ANSI requires this.
4062 Otherwise, if TEMP is not TARGET, return TEMP
4063 if it is constant (for efficiency),
4064 or if we really want the correct value. */
4065 if (!(target && GET_CODE (target) == REG
4066 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4067 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4068 && ! rtx_equal_p (temp, target)
4069 && (CONSTANT_P (temp) || want_value))
4070 dont_return_target = 1;
4073 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4074 the same as that of TARGET, adjust the constant. This is needed, for
4075 example, in case it is a CONST_DOUBLE and we want only a word-sized
4076 value. */
4077 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4078 && TREE_CODE (exp) != ERROR_MARK
4079 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4080 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4081 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4083 /* If value was not generated in the target, store it there.
4084 Convert the value to TARGET's type first if necessary.
4085 If TEMP and TARGET compare equal according to rtx_equal_p, but
4086 one or both of them are volatile memory refs, we have to distinguish
4087 two cases:
4088 - expand_expr has used TARGET. In this case, we must not generate
4089 another copy. This can be detected by TARGET being equal according
4090 to == .
4091 - expand_expr has not used TARGET - that means that the source just
4092 happens to have the same RTX form. Since temp will have been created
4093 by expand_expr, it will compare unequal according to == .
4094 We must generate a copy in this case, to reach the correct number
4095 of volatile memory references. */
4097 if ((! rtx_equal_p (temp, target)
4098 || (temp != target && (side_effects_p (temp)
4099 || side_effects_p (target))))
4100 && TREE_CODE (exp) != ERROR_MARK
4101 && ! dont_store_target)
4103 target = protect_from_queue (target, 1);
4104 if (GET_MODE (temp) != GET_MODE (target)
4105 && GET_MODE (temp) != VOIDmode)
4107 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4108 if (dont_return_target)
4110 /* In this case, we will return TEMP,
4111 so make sure it has the proper mode.
4112 But don't forget to store the value into TARGET. */
4113 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4114 emit_move_insn (target, temp);
4116 else
4117 convert_move (target, temp, unsignedp);
4120 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4122 /* Handle copying a string constant into an array. The string
4123 constant may be shorter than the array. So copy just the string's
4124 actual length, and clear the rest. First get the size of the data
4125 type of the string, which is actually the size of the target. */
4126 rtx size = expr_size (exp);
4128 if (GET_CODE (size) == CONST_INT
4129 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4130 emit_block_move (target, temp, size);
4131 else
4133 /* Compute the size of the data to copy from the string. */
4134 tree copy_size
4135 = size_binop (MIN_EXPR,
4136 make_tree (sizetype, size),
4137 size_int (TREE_STRING_LENGTH (exp)));
4138 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4139 VOIDmode, 0);
4140 rtx label = 0;
4142 /* Copy that much. */
4143 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4144 emit_block_move (target, temp, copy_size_rtx);
4146 /* Figure out how much is left in TARGET that we have to clear.
4147 Do all calculations in ptr_mode. */
4148 if (GET_CODE (copy_size_rtx) == CONST_INT)
4150 size = plus_constant (size, -INTVAL (copy_size_rtx));
4151 target = adjust_address (target, BLKmode,
4152 INTVAL (copy_size_rtx));
4154 else
4156 size = expand_binop (ptr_mode, sub_optab, size,
4157 copy_size_rtx, NULL_RTX, 0,
4158 OPTAB_LIB_WIDEN);
4160 #ifdef POINTERS_EXTEND_UNSIGNED
4161 if (GET_MODE (copy_size_rtx) != Pmode)
4162 copy_size_rtx = convert_memory_address (Pmode,
4163 copy_size_rtx);
4164 #endif
4166 target = offset_address (target, copy_size_rtx,
4167 highest_pow2_factor (copy_size));
4168 label = gen_label_rtx ();
4169 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4170 GET_MODE (size), 0, label);
4173 if (size != const0_rtx)
4174 clear_storage (target, size);
4176 if (label)
4177 emit_label (label);
4180 /* Handle calls that return values in multiple non-contiguous locations.
4181 The Irix 6 ABI has examples of this. */
4182 else if (GET_CODE (target) == PARALLEL)
4183 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4184 else if (GET_MODE (temp) == BLKmode)
4185 emit_block_move (target, temp, expr_size (exp));
4186 else
4187 emit_move_insn (target, temp);
4190 /* If we don't want a value, return NULL_RTX. */
4191 if (! want_value)
4192 return NULL_RTX;
4194 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4195 ??? The latter test doesn't seem to make sense. */
4196 else if (dont_return_target && GET_CODE (temp) != MEM)
4197 return temp;
4199 /* Return TARGET itself if it is a hard register. */
4200 else if (want_value && GET_MODE (target) != BLKmode
4201 && ! (GET_CODE (target) == REG
4202 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4203 return copy_to_reg (target);
4205 else
4206 return target;
4209 /* Return 1 if EXP just contains zeros. */
4211 static int
4212 is_zeros_p (exp)
4213 tree exp;
4215 tree elt;
4217 switch (TREE_CODE (exp))
4219 case CONVERT_EXPR:
4220 case NOP_EXPR:
4221 case NON_LVALUE_EXPR:
4222 case VIEW_CONVERT_EXPR:
4223 return is_zeros_p (TREE_OPERAND (exp, 0));
4225 case INTEGER_CST:
4226 return integer_zerop (exp);
4228 case COMPLEX_CST:
4229 return
4230 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4232 case REAL_CST:
4233 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4235 case CONSTRUCTOR:
4236 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4237 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4238 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4239 if (! is_zeros_p (TREE_VALUE (elt)))
4240 return 0;
4242 return 1;
4244 default:
4245 return 0;
4249 /* Return 1 if EXP contains mostly (3/4) zeros. */
4251 static int
4252 mostly_zeros_p (exp)
4253 tree exp;
4255 if (TREE_CODE (exp) == CONSTRUCTOR)
4257 int elts = 0, zeros = 0;
4258 tree elt = CONSTRUCTOR_ELTS (exp);
4259 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4261 /* If there are no ranges of true bits, it is all zero. */
4262 return elt == NULL_TREE;
4264 for (; elt; elt = TREE_CHAIN (elt))
4266 /* We do not handle the case where the index is a RANGE_EXPR,
4267 so the statistic will be somewhat inaccurate.
4268 We do make a more accurate count in store_constructor itself,
4269 	     and since this function is used only for nested array elements,
4270 this should be close enough. */
4271 if (mostly_zeros_p (TREE_VALUE (elt)))
4272 zeros++;
4273 elts++;
4276 return 4 * zeros >= 3 * elts;
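      /* (The test above is the 3/4 threshold: for example, a constructor
	 with 16 elements yields nonzero only if at least 12 of them are
	 themselves mostly zero, since 4 * 12 >= 3 * 16.)  */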
4279 return is_zeros_p (exp);
4282 /* Helper function for store_constructor.
4283 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4284 TYPE is the type of the CONSTRUCTOR, not the element type.
4285 CLEARED is as for store_constructor.
4286 ALIAS_SET is the alias set to use for any stores.
4288 This provides a recursive shortcut back to store_constructor when it isn't
4289 necessary to go through store_field. This is so that we can pass through
4290 the cleared field to let store_constructor know that we may not have to
4291 clear a substructure if the outer structure has already been cleared. */
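/* Editorial note: for example, when a cleared outer structure contains a
   nested aggregate whose initializer is all zeros, passing CLEARED through
   here lets store_constructor skip both the redundant clear and the
   redundant zero stores for that substructure.  */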
4293 static void
4294 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4295 alias_set)
4296 rtx target;
4297 unsigned HOST_WIDE_INT bitsize;
4298 HOST_WIDE_INT bitpos;
4299 enum machine_mode mode;
4300 tree exp, type;
4301 int cleared;
4302 int alias_set;
4304 if (TREE_CODE (exp) == CONSTRUCTOR
4305 && bitpos % BITS_PER_UNIT == 0
4306 /* If we have a non-zero bitpos for a register target, then we just
4307 let store_field do the bitfield handling. This is unlikely to
4308 	 generate unnecessary clear instructions anyway.  */
4309 && (bitpos == 0 || GET_CODE (target) == MEM))
4311 if (GET_CODE (target) == MEM)
4312 target
4313 = adjust_address (target,
4314 GET_MODE (target) == BLKmode
4315 || 0 != (bitpos
4316 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4317 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4320 /* Update the alias set, if required. */
4321 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4322 && MEM_ALIAS_SET (target) != 0)
4324 target = copy_rtx (target);
4325 set_mem_alias_set (target, alias_set);
4328 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4330 else
4331 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4332 alias_set);
4335 /* Store the value of constructor EXP into the rtx TARGET.
4336 TARGET is either a REG or a MEM; we know it cannot conflict, since
4337 safe_from_p has been called.
4338 CLEARED is true if TARGET is known to have been zero'd.
4339 SIZE is the number of bytes of TARGET we are allowed to modify: this
4340 may not be the same as the size of EXP if we are assigning to a field
4341 which has been packed to exclude padding bits. */
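/* Editorial example (not from the original sources): given a constructor
   that lists fewer values than its RECORD_TYPE has fields, e.g. `{ 1 }'
   for a three-field struct, the code below clears TARGET first and then
   stores only the explicitly supplied elements.  */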
4343 static void
4344 store_constructor (exp, target, cleared, size)
4345 tree exp;
4346 rtx target;
4347 int cleared;
4348 HOST_WIDE_INT size;
4350 tree type = TREE_TYPE (exp);
4351 #ifdef WORD_REGISTER_OPERATIONS
4352 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4353 #endif
4355 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4356 || TREE_CODE (type) == QUAL_UNION_TYPE)
4358 tree elt;
4360 /* We either clear the aggregate or indicate the value is dead. */
4361 if ((TREE_CODE (type) == UNION_TYPE
4362 || TREE_CODE (type) == QUAL_UNION_TYPE)
4363 && ! cleared
4364 && ! CONSTRUCTOR_ELTS (exp))
4365 /* If the constructor is empty, clear the union. */
4367 clear_storage (target, expr_size (exp));
4368 cleared = 1;
4371 /* If we are building a static constructor into a register,
4372 set the initial value as zero so we can fold the value into
4373 a constant. But if more than one register is involved,
4374 this probably loses. */
4375 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4376 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4378 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4379 cleared = 1;
4382 /* If the constructor has fewer fields than the structure
4383 or if we are initializing the structure to mostly zeros,
4384 clear the whole structure first. Don't do this if TARGET is a
4385 register whose mode size isn't equal to SIZE since clear_storage
4386 can't handle this case. */
4387 else if (! cleared && size > 0
4388 && ((list_length (CONSTRUCTOR_ELTS (exp))
4389 != fields_length (type))
4390 || mostly_zeros_p (exp))
4391 && (GET_CODE (target) != REG
4392 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4393 == size)))
4395 clear_storage (target, GEN_INT (size));
4396 cleared = 1;
4399 if (! cleared)
4400 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4402 /* Store each element of the constructor into
4403 the corresponding field of TARGET. */
4405 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4407 tree field = TREE_PURPOSE (elt);
4408 tree value = TREE_VALUE (elt);
4409 enum machine_mode mode;
4410 HOST_WIDE_INT bitsize;
4411 HOST_WIDE_INT bitpos = 0;
4412 int unsignedp;
4413 tree offset;
4414 rtx to_rtx = target;
4416 /* Just ignore missing fields.
4417 We cleared the whole structure, above,
4418 if any fields are missing. */
4419 if (field == 0)
4420 continue;
4422 if (cleared && is_zeros_p (value))
4423 continue;
4425 if (host_integerp (DECL_SIZE (field), 1))
4426 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4427 else
4428 bitsize = -1;
4430 unsignedp = TREE_UNSIGNED (field);
4431 mode = DECL_MODE (field);
4432 if (DECL_BIT_FIELD (field))
4433 mode = VOIDmode;
4435 offset = DECL_FIELD_OFFSET (field);
4436 if (host_integerp (offset, 0)
4437 && host_integerp (bit_position (field), 0))
4439 bitpos = int_bit_position (field);
4440 offset = 0;
4442 else
4443 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4445 if (offset)
4447 rtx offset_rtx;
4449 if (contains_placeholder_p (offset))
4450 offset = build (WITH_RECORD_EXPR, sizetype,
4451 offset, make_tree (TREE_TYPE (exp), target));
4453 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4454 if (GET_CODE (to_rtx) != MEM)
4455 abort ();
4457 if (GET_MODE (offset_rtx) != ptr_mode)
4458 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4460 #ifdef POINTERS_EXTEND_UNSIGNED
4461 if (GET_MODE (offset_rtx) != Pmode)
4462 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4463 #endif
4465 to_rtx = offset_address (to_rtx, offset_rtx,
4466 highest_pow2_factor (offset));
4469 if (TREE_READONLY (field))
4471 if (GET_CODE (to_rtx) == MEM)
4472 to_rtx = copy_rtx (to_rtx);
4474 RTX_UNCHANGING_P (to_rtx) = 1;
4477 #ifdef WORD_REGISTER_OPERATIONS
4478 /* If this initializes a field that is smaller than a word, at the
4479 start of a word, try to widen it to a full word.
4480 This special case allows us to output C++ member function
4481 initializations in a form that the optimizers can understand. */
4482 if (GET_CODE (target) == REG
4483 && bitsize < BITS_PER_WORD
4484 && bitpos % BITS_PER_WORD == 0
4485 && GET_MODE_CLASS (mode) == MODE_INT
4486 && TREE_CODE (value) == INTEGER_CST
4487 && exp_size >= 0
4488 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4490 tree type = TREE_TYPE (value);
4492 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4494 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4495 value = convert (type, value);
4498 if (BYTES_BIG_ENDIAN)
4499 value
4500 = fold (build (LSHIFT_EXPR, type, value,
4501 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4502 bitsize = BITS_PER_WORD;
4503 mode = word_mode;
4505 #endif
4507 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4508 && DECL_NONADDRESSABLE_P (field))
4510 to_rtx = copy_rtx (to_rtx);
4511 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4514 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4515 value, type, cleared,
4516 get_alias_set (TREE_TYPE (field)));
4519 else if (TREE_CODE (type) == ARRAY_TYPE)
4521 tree elt;
4522 int i;
4523 int need_to_clear;
4524 tree domain = TYPE_DOMAIN (type);
4525 tree elttype = TREE_TYPE (type);
4526 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4527 && TYPE_MAX_VALUE (domain)
4528 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4529 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4530 HOST_WIDE_INT minelt = 0;
4531 HOST_WIDE_INT maxelt = 0;
4533 /* If we have constant bounds for the range of the type, get them. */
4534 if (const_bounds_p)
4536 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4537 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4540 /* If the constructor has fewer elements than the array,
4541 	 clear the whole array first.  Similarly if this is a
4542 static constructor of a non-BLKmode object. */
4543 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4544 need_to_clear = 1;
4545 else
4547 HOST_WIDE_INT count = 0, zero_count = 0;
4548 need_to_clear = ! const_bounds_p;
4550 /* This loop is a more accurate version of the loop in
4551 mostly_zeros_p (it handles RANGE_EXPR in an index).
4552 It is also needed to check for missing elements. */
4553 for (elt = CONSTRUCTOR_ELTS (exp);
4554 elt != NULL_TREE && ! need_to_clear;
4555 elt = TREE_CHAIN (elt))
4557 tree index = TREE_PURPOSE (elt);
4558 HOST_WIDE_INT this_node_count;
4560 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4562 tree lo_index = TREE_OPERAND (index, 0);
4563 tree hi_index = TREE_OPERAND (index, 1);
4565 if (! host_integerp (lo_index, 1)
4566 || ! host_integerp (hi_index, 1))
4568 need_to_clear = 1;
4569 break;
4572 this_node_count = (tree_low_cst (hi_index, 1)
4573 - tree_low_cst (lo_index, 1) + 1);
4575 else
4576 this_node_count = 1;
4578 count += this_node_count;
4579 if (mostly_zeros_p (TREE_VALUE (elt)))
4580 zero_count += this_node_count;
4583 /* Clear the entire array first if there are any missing elements,
4584 or if the incidence of zero elements is >= 75%. */
4585 if (! need_to_clear
4586 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4587 need_to_clear = 1;
4590 if (need_to_clear && size > 0)
4592 if (! cleared)
4593 clear_storage (target, GEN_INT (size));
4594 cleared = 1;
4596 else if (REG_P (target))
4597 /* Inform later passes that the old value is dead. */
4598 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4600 /* Store each element of the constructor into
4601 the corresponding element of TARGET, determined
4602 by counting the elements. */
4603 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4604 elt;
4605 elt = TREE_CHAIN (elt), i++)
4607 enum machine_mode mode;
4608 HOST_WIDE_INT bitsize;
4609 HOST_WIDE_INT bitpos;
4610 int unsignedp;
4611 tree value = TREE_VALUE (elt);
4612 tree index = TREE_PURPOSE (elt);
4613 rtx xtarget = target;
4615 if (cleared && is_zeros_p (value))
4616 continue;
4618 unsignedp = TREE_UNSIGNED (elttype);
4619 mode = TYPE_MODE (elttype);
4620 if (mode == BLKmode)
4621 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4622 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4623 : -1);
4624 else
4625 bitsize = GET_MODE_BITSIZE (mode);
4627 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4629 tree lo_index = TREE_OPERAND (index, 0);
4630 tree hi_index = TREE_OPERAND (index, 1);
4631 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4632 struct nesting *loop;
4633 HOST_WIDE_INT lo, hi, count;
4634 tree position;
4636 /* If the range is constant and "small", unroll the loop. */
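	      /* "Small" (see the condition below) means a non-memory
		 target, a range of at most two elements, or an unrolled
		 range whose total size is at most 40 * 8 bits, i.e.
		 40 bytes.  */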
4637 if (const_bounds_p
4638 && host_integerp (lo_index, 0)
4639 && host_integerp (hi_index, 0)
4640 && (lo = tree_low_cst (lo_index, 0),
4641 hi = tree_low_cst (hi_index, 0),
4642 count = hi - lo + 1,
4643 (GET_CODE (target) != MEM
4644 || count <= 2
4645 || (host_integerp (TYPE_SIZE (elttype), 1)
4646 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4647 <= 40 * 8)))))
4649 lo -= minelt; hi -= minelt;
4650 for (; lo <= hi; lo++)
4652 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4654 if (GET_CODE (target) == MEM
4655 && !MEM_KEEP_ALIAS_SET_P (target)
4656 && TYPE_NONALIASED_COMPONENT (type))
4658 target = copy_rtx (target);
4659 MEM_KEEP_ALIAS_SET_P (target) = 1;
4662 store_constructor_field
4663 (target, bitsize, bitpos, mode, value, type, cleared,
4664 get_alias_set (elttype));
4667 else
4669 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4670 loop_top = gen_label_rtx ();
4671 loop_end = gen_label_rtx ();
4673 unsignedp = TREE_UNSIGNED (domain);
4675 index = build_decl (VAR_DECL, NULL_TREE, domain);
4677 index_r
4678 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4679 &unsignedp, 0));
4680 SET_DECL_RTL (index, index_r);
4681 if (TREE_CODE (value) == SAVE_EXPR
4682 && SAVE_EXPR_RTL (value) == 0)
4684 /* Make sure value gets expanded once before the
4685 loop. */
4686 expand_expr (value, const0_rtx, VOIDmode, 0);
4687 emit_queue ();
4689 store_expr (lo_index, index_r, 0);
4690 loop = expand_start_loop (0);
4692 /* Assign value to element index. */
4693 position
4694 = convert (ssizetype,
4695 fold (build (MINUS_EXPR, TREE_TYPE (index),
4696 index, TYPE_MIN_VALUE (domain))));
4697 position = size_binop (MULT_EXPR, position,
4698 convert (ssizetype,
4699 TYPE_SIZE_UNIT (elttype)));
4701 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4702 xtarget = offset_address (target, pos_rtx,
4703 highest_pow2_factor (position));
4704 xtarget = adjust_address (xtarget, mode, 0);
4705 if (TREE_CODE (value) == CONSTRUCTOR)
4706 store_constructor (value, xtarget, cleared,
4707 bitsize / BITS_PER_UNIT);
4708 else
4709 store_expr (value, xtarget, 0);
4711 expand_exit_loop_if_false (loop,
4712 build (LT_EXPR, integer_type_node,
4713 index, hi_index));
4715 expand_increment (build (PREINCREMENT_EXPR,
4716 TREE_TYPE (index),
4717 index, integer_one_node), 0, 0);
4718 expand_end_loop ();
4719 emit_label (loop_end);
4722 else if ((index != 0 && ! host_integerp (index, 0))
4723 || ! host_integerp (TYPE_SIZE (elttype), 1))
4725 tree position;
4727 if (index == 0)
4728 index = ssize_int (1);
4730 if (minelt)
4731 index = convert (ssizetype,
4732 				 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4733 TYPE_MIN_VALUE (domain))));
4735 position = size_binop (MULT_EXPR, index,
4736 convert (ssizetype,
4737 TYPE_SIZE_UNIT (elttype)));
4738 xtarget = offset_address (target,
4739 expand_expr (position, 0, VOIDmode, 0),
4740 highest_pow2_factor (position));
4741 xtarget = adjust_address (xtarget, mode, 0);
4742 store_expr (value, xtarget, 0);
4744 else
4746 if (index != 0)
4747 bitpos = ((tree_low_cst (index, 0) - minelt)
4748 * tree_low_cst (TYPE_SIZE (elttype), 1));
4749 else
4750 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4752 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4753 && TYPE_NONALIASED_COMPONENT (type))
4755 target = copy_rtx (target);
4756 MEM_KEEP_ALIAS_SET_P (target) = 1;
4759 store_constructor_field (target, bitsize, bitpos, mode, value,
4760 type, cleared, get_alias_set (elttype));
4766 /* Set constructor assignments. */
4767 else if (TREE_CODE (type) == SET_TYPE)
4769 tree elt = CONSTRUCTOR_ELTS (exp);
4770 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4771 tree domain = TYPE_DOMAIN (type);
4772 tree domain_min, domain_max, bitlength;
4774 /* The default implementation strategy is to extract the constant
4775 parts of the constructor, use that to initialize the target,
4776 and then "or" in whatever non-constant ranges we need in addition.
4778 If a large set is all zero or all ones, it is
4779 probably better to set it using memset (if available) or bzero.
4780 Also, if a large set has just a single range, it may also be
4781 	 to first clear the whole set (using bzero/memset), and then
4782 	 set the bits we want.  */
4784 /* Check for all zeros. */
4785 if (elt == NULL_TREE && size > 0)
4787 if (!cleared)
4788 clear_storage (target, GEN_INT (size));
4789 return;
4792 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4793 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4794 bitlength = size_binop (PLUS_EXPR,
4795 size_diffop (domain_max, domain_min),
4796 ssize_int (1));
4798 nbits = tree_low_cst (bitlength, 1);
4800 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4801 are "complicated" (more than one range), initialize (the
4802 constant parts) by copying from a constant. */
4803 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4804 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4806 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4807 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4808 char *bit_buffer = (char *) alloca (nbits);
4809 HOST_WIDE_INT word = 0;
4810 unsigned int bit_pos = 0;
4811 unsigned int ibit = 0;
4812 unsigned int offset = 0; /* In bytes from beginning of set. */
4814 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4815 for (;;)
4817 if (bit_buffer[ibit])
4819 if (BYTES_BIG_ENDIAN)
4820 word |= (1 << (set_word_size - 1 - bit_pos));
4821 else
4822 word |= 1 << bit_pos;
4825 bit_pos++; ibit++;
4826 if (bit_pos >= set_word_size || ibit == nbits)
4828 if (word != 0 || ! cleared)
4830 rtx datum = GEN_INT (word);
4831 rtx to_rtx;
4833 /* The assumption here is that it is safe to use
4834 XEXP if the set is multi-word, but not if
4835 it's single-word. */
4836 if (GET_CODE (target) == MEM)
4837 to_rtx = adjust_address (target, mode, offset);
4838 else if (offset == 0)
4839 to_rtx = target;
4840 else
4841 abort ();
4842 emit_move_insn (to_rtx, datum);
4845 if (ibit == nbits)
4846 break;
4847 word = 0;
4848 bit_pos = 0;
4849 offset += set_word_size / BITS_PER_UNIT;
4853 else if (!cleared)
4854 /* Don't bother clearing storage if the set is all ones. */
4855 if (TREE_CHAIN (elt) != NULL_TREE
4856 || (TREE_PURPOSE (elt) == NULL_TREE
4857 ? nbits != 1
4858 : ( ! host_integerp (TREE_VALUE (elt), 0)
4859 || ! host_integerp (TREE_PURPOSE (elt), 0)
4860 || (tree_low_cst (TREE_VALUE (elt), 0)
4861 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4862 != (HOST_WIDE_INT) nbits))))
4863 clear_storage (target, expr_size (exp));
4865 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4867 /* Start of range of element or NULL. */
4868 tree startbit = TREE_PURPOSE (elt);
4869 /* End of range of element, or element value. */
4870 tree endbit = TREE_VALUE (elt);
4871 #ifdef TARGET_MEM_FUNCTIONS
4872 HOST_WIDE_INT startb, endb;
4873 #endif
4874 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4876 bitlength_rtx = expand_expr (bitlength,
4877 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4879 /* Handle non-range tuple element like [ expr ]. */
4880 if (startbit == NULL_TREE)
4882 startbit = save_expr (endbit);
4883 endbit = startbit;
4886 startbit = convert (sizetype, startbit);
4887 endbit = convert (sizetype, endbit);
4888 if (! integer_zerop (domain_min))
4890 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4891 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4893 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4894 EXPAND_CONST_ADDRESS);
4895 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4896 EXPAND_CONST_ADDRESS);
4898 if (REG_P (target))
4900 targetx
4901 = assign_temp
4902 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4903 TYPE_QUAL_CONST)),
4904 0, 1, 1);
4905 emit_move_insn (targetx, target);
4908 else if (GET_CODE (target) == MEM)
4909 targetx = target;
4910 else
4911 abort ();
4913 #ifdef TARGET_MEM_FUNCTIONS
4914 /* Optimization: If startbit and endbit are
4915 constants divisible by BITS_PER_UNIT,
4916 call memset instead. */
4917 if (TREE_CODE (startbit) == INTEGER_CST
4918 && TREE_CODE (endbit) == INTEGER_CST
4919 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4920 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4922 emit_library_call (memset_libfunc, LCT_NORMAL,
4923 VOIDmode, 3,
4924 plus_constant (XEXP (targetx, 0),
4925 startb / BITS_PER_UNIT),
4926 Pmode,
4927 constm1_rtx, TYPE_MODE (integer_type_node),
4928 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4929 TYPE_MODE (sizetype));
4931 else
4932 #endif
4933 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4934 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4935 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4936 startbit_rtx, TYPE_MODE (sizetype),
4937 endbit_rtx, TYPE_MODE (sizetype));
4939 if (REG_P (target))
4940 emit_move_insn (target, targetx);
4944 else
4945 abort ();
4948 /* Store the value of EXP (an expression tree)
4949 into a subfield of TARGET which has mode MODE and occupies
4950 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4951 If MODE is VOIDmode, it means that we are storing into a bit-field.
4953 If VALUE_MODE is VOIDmode, return nothing in particular.
4954 UNSIGNEDP is not used in this case.
4956 Otherwise, return an rtx for the value stored. This rtx
4957 has mode VALUE_MODE if that is convenient to do.
4958 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4960    TYPE is the type of the underlying object.
4962 ALIAS_SET is the alias set for the destination. This value will
4963 (in general) be different from that for TARGET, since TARGET is a
4964 reference to the containing structure. */
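/* Editorial example: a store into a 3-bit bit-field placed 5 bits into its
   containing object reaches this function with BITSIZE == 3, BITPOS == 5
   and MODE == VOIDmode (the bit-field case described above).  */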
4966 static rtx
4967 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
4968 alias_set)
4969 rtx target;
4970 HOST_WIDE_INT bitsize;
4971 HOST_WIDE_INT bitpos;
4972 enum machine_mode mode;
4973 tree exp;
4974 enum machine_mode value_mode;
4975 int unsignedp;
4976 tree type;
4977 int alias_set;
4979 HOST_WIDE_INT width_mask = 0;
4981 if (TREE_CODE (exp) == ERROR_MARK)
4982 return const0_rtx;
4984 /* If we have nothing to store, do nothing unless the expression has
4985 side-effects. */
4986 if (bitsize == 0)
4987 return expand_expr (exp, const0_rtx, VOIDmode, 0);
4988   else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
4989 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
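  /* For example, a BITSIZE of 5 gives a WIDTH_MASK of 0x1f; it is used
     near the end of this function to truncate the value handed back to
     the caller to the stored bit-field's width.  */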
4991 /* If we are storing into an unaligned field of an aligned union that is
4992 in a register, we may have the mode of TARGET being an integer mode but
4993 MODE == BLKmode. In that case, get an aligned object whose size and
4994 alignment are the same as TARGET and store TARGET into it (we can avoid
4995 the store if the field being stored is the entire width of TARGET). Then
4996 call ourselves recursively to store the field into a BLKmode version of
4997 that object. Finally, load from the object into TARGET. This is not
4998 very efficient in general, but should only be slightly more expensive
4999 than the otherwise-required unaligned accesses. Perhaps this can be
5000 cleaned up later. */
5002 if (mode == BLKmode
5003 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5005 rtx object
5006 = assign_temp
5007 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5008 0, 1, 1);
5009 rtx blk_object = adjust_address (object, BLKmode, 0);
5011 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5012 emit_move_insn (object, target);
5014 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5015 alias_set);
5017 emit_move_insn (target, object);
5019 /* We want to return the BLKmode version of the data. */
5020 return blk_object;
5023 if (GET_CODE (target) == CONCAT)
5025 /* We're storing into a struct containing a single __complex. */
5027 if (bitpos != 0)
5028 abort ();
5029 return store_expr (exp, target, 0);
5032 /* If the structure is in a register or if the component
5033 is a bit field, we cannot use addressing to access it.
5034 Use bit-field techniques or SUBREG to store in it. */
5036 if (mode == VOIDmode
5037 || (mode != BLKmode && ! direct_store[(int) mode]
5038 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5039 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5040 || GET_CODE (target) == REG
5041 || GET_CODE (target) == SUBREG
5042 /* If the field isn't aligned enough to store as an ordinary memref,
5043 store it as a bit field. */
5044 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5045 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5046 || bitpos % GET_MODE_ALIGNMENT (mode)))
5047 /* If the RHS and field are a constant size and the size of the
5048 RHS isn't the same size as the bitfield, we must use bitfield
5049 operations. */
5050 || (bitsize >= 0
5051 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5052 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5054 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5056 /* If BITSIZE is narrower than the size of the type of EXP
5057 we will be narrowing TEMP. Normally, what's wanted are the
5058 	 low-order bits.  However, if EXP's type is a record and this is a
5059 big-endian machine, we want the upper BITSIZE bits. */
5060 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5061 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5062 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5063 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5064 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5065 - bitsize),
5066 temp, 1);
5068 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5069 MODE. */
5070 if (mode != VOIDmode && mode != BLKmode
5071 && mode != TYPE_MODE (TREE_TYPE (exp)))
5072 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5074 /* If the modes of TARGET and TEMP are both BLKmode, both
5075 must be in memory and BITPOS must be aligned on a byte
5076 boundary. If so, we simply do a block copy. */
5077 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5079 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5080 || bitpos % BITS_PER_UNIT != 0)
5081 abort ();
5083 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5084 emit_block_move (target, temp,
5085 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5086 / BITS_PER_UNIT));
5088 return value_mode == VOIDmode ? const0_rtx : target;
5091 /* Store the value in the bitfield. */
5092 store_bit_field (target, bitsize, bitpos, mode, temp,
5093 int_size_in_bytes (type));
5095 if (value_mode != VOIDmode)
5097 /* The caller wants an rtx for the value.
5098 If possible, avoid refetching from the bitfield itself. */
5099 if (width_mask != 0
5100 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5102 tree count;
5103 enum machine_mode tmode;
5105 if (unsignedp)
5106 return expand_and (temp,
5107 GEN_INT
5108 (trunc_int_for_mode
5109 (width_mask,
5110 GET_MODE (temp) == VOIDmode
5111 ? value_mode
5112 : GET_MODE (temp))), NULL_RTX);
5114 tmode = GET_MODE (temp);
5115 if (tmode == VOIDmode)
5116 tmode = value_mode;
5117 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5118 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5119 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5122 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5123 NULL_RTX, value_mode, VOIDmode,
5124 int_size_in_bytes (type));
5126 return const0_rtx;
5128 else
5130 rtx addr = XEXP (target, 0);
5131 rtx to_rtx = target;
5133 /* If a value is wanted, it must be the lhs;
5134 so make the address stable for multiple use. */
5136 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5137 && ! CONSTANT_ADDRESS_P (addr)
5138 /* A frame-pointer reference is already stable. */
5139 && ! (GET_CODE (addr) == PLUS
5140 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5141 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5142 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5143 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5145 /* Now build a reference to just the desired component. */
5147 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5149 if (to_rtx == target)
5150 to_rtx = copy_rtx (to_rtx);
5152 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5153 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5154 set_mem_alias_set (to_rtx, alias_set);
5156 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5160 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5161 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5162 codes and find the ultimate containing object, which we return.
5164 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5165 bit position, and *PUNSIGNEDP to the signedness of the field.
5166 If the position of the field is variable, we store a tree
5167 giving the variable offset (in units) in *POFFSET.
5168 This offset is in addition to the bit position.
5169 If the position is not variable, we store 0 in *POFFSET.
5171 If any of the extraction expressions is volatile,
5172 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5174 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5175 is a mode that can be used to access the field. In that case, *PBITSIZE
5176 is redundant.
5178 If the field describes a variable-sized object, *PMODE is set to
5179 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5180 this case, but the address of the object can be found. */
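/* Editorial example: for a reference `s.f' where F is a bit-field of 3
   bits placed 17 bits from the start of S, this returns the tree for S
   with *PBITSIZE == 3, *PBITPOS == 17, *POFFSET == 0 and
   *PMODE == VOIDmode.  */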
5182 tree
5183 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5184 punsignedp, pvolatilep)
5185 tree exp;
5186 HOST_WIDE_INT *pbitsize;
5187 HOST_WIDE_INT *pbitpos;
5188 tree *poffset;
5189 enum machine_mode *pmode;
5190 int *punsignedp;
5191 int *pvolatilep;
5193 tree size_tree = 0;
5194 enum machine_mode mode = VOIDmode;
5195 tree offset = size_zero_node;
5196 tree bit_offset = bitsize_zero_node;
5197 tree placeholder_ptr = 0;
5198 tree tem;
5200 /* First get the mode, signedness, and size. We do this from just the
5201 outermost expression. */
5202 if (TREE_CODE (exp) == COMPONENT_REF)
5204 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5205 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5206 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5208 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5210 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5212 size_tree = TREE_OPERAND (exp, 1);
5213 *punsignedp = TREE_UNSIGNED (exp);
5215 else
5217 mode = TYPE_MODE (TREE_TYPE (exp));
5218 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5220 if (mode == BLKmode)
5221 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5222 else
5223 *pbitsize = GET_MODE_BITSIZE (mode);
5226 if (size_tree != 0)
5228 if (! host_integerp (size_tree, 1))
5229 mode = BLKmode, *pbitsize = -1;
5230 else
5231 *pbitsize = tree_low_cst (size_tree, 1);
5234 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5235 and find the ultimate containing object. */
5236 while (1)
5238 if (TREE_CODE (exp) == BIT_FIELD_REF)
5239 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5240 else if (TREE_CODE (exp) == COMPONENT_REF)
5242 tree field = TREE_OPERAND (exp, 1);
5243 tree this_offset = DECL_FIELD_OFFSET (field);
5245 /* If this field hasn't been filled in yet, don't go
5246 past it. This should only happen when folding expressions
5247 made during type construction. */
5248 if (this_offset == 0)
5249 break;
5250 else if (! TREE_CONSTANT (this_offset)
5251 && contains_placeholder_p (this_offset))
5252 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5254 offset = size_binop (PLUS_EXPR, offset, this_offset);
5255 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5256 DECL_FIELD_BIT_OFFSET (field));
5258 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5261 else if (TREE_CODE (exp) == ARRAY_REF
5262 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5264 tree index = TREE_OPERAND (exp, 1);
5265 tree array = TREE_OPERAND (exp, 0);
5266 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5267 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5268 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5270 /* We assume all arrays have sizes that are a multiple of a byte.
5271 First subtract the lower bound, if any, in the type of the
5272 index, then convert to sizetype and multiply by the size of the
5273 array element. */
5274 if (low_bound != 0 && ! integer_zerop (low_bound))
5275 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5276 index, low_bound));
5278 /* If the index has a self-referential type, pass it to a
5279 	     WITH_RECORD_EXPR; if the component size is self-referential, pass our
5280 component to one. */
5281 if (! TREE_CONSTANT (index)
5282 && contains_placeholder_p (index))
5283 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5284 if (! TREE_CONSTANT (unit_size)
5285 && contains_placeholder_p (unit_size))
5286 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5288 offset = size_binop (PLUS_EXPR, offset,
5289 size_binop (MULT_EXPR,
5290 convert (sizetype, index),
5291 unit_size));
5294 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5296 tree new = find_placeholder (exp, &placeholder_ptr);
5298 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5299 We might have been called from tree optimization where we
5300 haven't set up an object yet. */
5301 if (new == 0)
5302 break;
5303 else
5304 exp = new;
5306 continue;
5308 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5309 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5310 && ! ((TREE_CODE (exp) == NOP_EXPR
5311 || TREE_CODE (exp) == CONVERT_EXPR)
5312 && (TYPE_MODE (TREE_TYPE (exp))
5313 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5314 break;
5316 /* If any reference in the chain is volatile, the effect is volatile. */
5317 if (TREE_THIS_VOLATILE (exp))
5318 *pvolatilep = 1;
5320 exp = TREE_OPERAND (exp, 0);
5323 /* If OFFSET is constant, see if we can return the whole thing as a
5324 constant bit position. Otherwise, split it up. */
5325 if (host_integerp (offset, 0)
5326 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5327 bitsize_unit_node))
5328 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5329 && host_integerp (tem, 0))
5330 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5331 else
5332 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5334 *pmode = mode;
5335 return exp;
5338 /* Return 1 if T is an expression that get_inner_reference handles. */
5341 handled_component_p (t)
5342 tree t;
5344 switch (TREE_CODE (t))
5346 case BIT_FIELD_REF:
5347 case COMPONENT_REF:
5348 case ARRAY_REF:
5349 case ARRAY_RANGE_REF:
5350 case NON_LVALUE_EXPR:
5351 case VIEW_CONVERT_EXPR:
5352 return 1;
5354 case NOP_EXPR:
5355 case CONVERT_EXPR:
5356 return (TYPE_MODE (TREE_TYPE (t))
5357 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5359 default:
5360 return 0;
5364 /* Given an rtx VALUE that may contain additions and multiplications, return
5365 an equivalent value that just refers to a register, memory, or constant.
5366 This is done by generating instructions to perform the arithmetic and
5367 returning a pseudo-register containing the value.
5369 The returned value may be a REG, SUBREG, MEM or constant. */
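/* Editorial sketch of the intent: given, say,
     (plus:SI (reg:SI 100) (const_int 4))
   force_operand emits the addition into a pseudo register and returns that
   pseudo, so the caller is left with a plain register, memory or constant
   operand.  */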
5372 force_operand (value, target)
5373 rtx value, target;
5375 optab binoptab = 0;
5376 /* Use a temporary to force order of execution of calls to
5377 `force_operand'. */
5378 rtx tmp;
5379 rtx op2;
5380 /* Use subtarget as the target for operand 0 of a binary operation. */
5381 rtx subtarget = get_subtarget (target);
5383 /* Check for a PIC address load. */
5384 if (flag_pic
5385 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5386 && XEXP (value, 0) == pic_offset_table_rtx
5387 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5388 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5389 || GET_CODE (XEXP (value, 1)) == CONST))
5391 if (!subtarget)
5392 subtarget = gen_reg_rtx (GET_MODE (value));
5393 emit_move_insn (subtarget, value);
5394 return subtarget;
5397 if (GET_CODE (value) == PLUS)
5398 binoptab = add_optab;
5399 else if (GET_CODE (value) == MINUS)
5400 binoptab = sub_optab;
5401 else if (GET_CODE (value) == MULT)
5403 op2 = XEXP (value, 1);
5404 if (!CONSTANT_P (op2)
5405 && !(GET_CODE (op2) == REG && op2 != subtarget))
5406 subtarget = 0;
5407 tmp = force_operand (XEXP (value, 0), subtarget);
5408 return expand_mult (GET_MODE (value), tmp,
5409 force_operand (op2, NULL_RTX),
5410 target, 1);
5413 if (binoptab)
5415 op2 = XEXP (value, 1);
5416 if (!CONSTANT_P (op2)
5417 && !(GET_CODE (op2) == REG && op2 != subtarget))
5418 subtarget = 0;
5419 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5421 binoptab = add_optab;
5422 op2 = negate_rtx (GET_MODE (value), op2);
5425 /* Check for an addition with OP2 a constant integer and our first
5426 operand a PLUS of a virtual register and something else. In that
5427 case, we want to emit the sum of the virtual register and the
5428 constant first and then add the other value. This allows virtual
5429 register instantiation to simply modify the constant rather than
5430 creating another one around this addition. */
5431 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5432 && GET_CODE (XEXP (value, 0)) == PLUS
5433 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5434 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5435 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5437 rtx temp = expand_binop (GET_MODE (value), binoptab,
5438 XEXP (XEXP (value, 0), 0), op2,
5439 subtarget, 0, OPTAB_LIB_WIDEN);
5440 return expand_binop (GET_MODE (value), binoptab, temp,
5441 force_operand (XEXP (XEXP (value, 0), 1), 0),
5442 target, 0, OPTAB_LIB_WIDEN);
5445 tmp = force_operand (XEXP (value, 0), subtarget);
5446 return expand_binop (GET_MODE (value), binoptab, tmp,
5447 force_operand (op2, NULL_RTX),
5448 target, 0, OPTAB_LIB_WIDEN);
5449 /* We give UNSIGNEDP = 0 to expand_binop
5450 because the only operations we are expanding here are signed ones. */
5453 #ifdef INSN_SCHEDULING
5454   /* On machines that have insn scheduling, we want all memory references to be
5455 explicit, so we need to deal with such paradoxical SUBREGs. */
5456 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5457 && (GET_MODE_SIZE (GET_MODE (value))
5458 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5459 value
5460 = simplify_gen_subreg (GET_MODE (value),
5461 force_reg (GET_MODE (SUBREG_REG (value)),
5462 force_operand (SUBREG_REG (value),
5463 NULL_RTX)),
5464 GET_MODE (SUBREG_REG (value)),
5465 SUBREG_BYTE (value));
5466 #endif
5468 return value;
5471 /* Subroutine of expand_expr: return nonzero iff there is no way that
5472 EXP can reference X, which is being modified. TOP_P is nonzero if this
5473 call is going to be used to determine whether we need a temporary
5474 for EXP, as opposed to a recursive call to this function.
5476 It is always safe for this routine to return zero since it merely
5477 searches for optimization opportunities. */
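/* Editorial note: "safe" here means EXP provably cannot read or modify X;
   for instance a constant (class 'c' below) is always safe, while a
   CALL_EXPR is conservatively assumed to clobber every hard register and
   all of memory.  */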
5480 safe_from_p (x, exp, top_p)
5481 rtx x;
5482 tree exp;
5483 int top_p;
5485 rtx exp_rtl = 0;
5486 int i, nops;
5487 static tree save_expr_list;
5489 if (x == 0
5490 /* If EXP has varying size, we MUST use a target since we currently
5491 have no way of allocating temporaries of variable size
5492 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5493 So we assume here that something at a higher level has prevented a
5494 clash. This is somewhat bogus, but the best we can do. Only
5495 do this when X is BLKmode and when we are at the top level. */
5496 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5497 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5498 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5499 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5500 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5501 != INTEGER_CST)
5502 && GET_MODE (x) == BLKmode)
5503 /* If X is in the outgoing argument area, it is always safe. */
5504 || (GET_CODE (x) == MEM
5505 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5506 || (GET_CODE (XEXP (x, 0)) == PLUS
5507 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5508 return 1;
5510 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5511 find the underlying pseudo. */
5512 if (GET_CODE (x) == SUBREG)
5514 x = SUBREG_REG (x);
5515 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5516 return 0;
5519 /* A SAVE_EXPR might appear many times in the expression passed to the
5520 top-level safe_from_p call, and if it has a complex subexpression,
5521 examining it multiple times could result in a combinatorial explosion.
5522 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5523 with optimization took about 28 minutes to compile -- even though it was
5524 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5525 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5526 we have processed. Note that the only test of top_p was above. */
5528 if (top_p)
5530 int rtn;
5531 tree t;
5533 save_expr_list = 0;
5535 rtn = safe_from_p (x, exp, 0);
5537 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5538 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5540 return rtn;
5543 /* Now look at our tree code and possibly recurse. */
5544 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5546 case 'd':
5547 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5548 break;
5550 case 'c':
5551 return 1;
5553 case 'x':
5554 if (TREE_CODE (exp) == TREE_LIST)
5555 return ((TREE_VALUE (exp) == 0
5556 || safe_from_p (x, TREE_VALUE (exp), 0))
5557 && (TREE_CHAIN (exp) == 0
5558 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5559 else if (TREE_CODE (exp) == ERROR_MARK)
5560 return 1; /* An already-visited SAVE_EXPR? */
5561 else
5562 return 0;
5564 case '1':
5565 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5567 case '2':
5568 case '<':
5569 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5570 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5572 case 'e':
5573 case 'r':
5574 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5575 the expression. If it is set, we conflict iff we are that rtx or
5576 both are in memory. Otherwise, we check all operands of the
5577 expression recursively. */
5579 switch (TREE_CODE (exp))
5581 case ADDR_EXPR:
5582 /* If the operand is static or we are static, we can't conflict.
5583 Likewise if we don't conflict with the operand at all. */
5584 if (staticp (TREE_OPERAND (exp, 0))
5585 || TREE_STATIC (exp)
5586 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5587 return 1;
5589 /* Otherwise, the only way this can conflict is if we are taking
5590 	 the address of a DECL whose address is part of X, which is
5591 very rare. */
5592 exp = TREE_OPERAND (exp, 0);
5593 if (DECL_P (exp))
5595 if (!DECL_RTL_SET_P (exp)
5596 || GET_CODE (DECL_RTL (exp)) != MEM)
5597 return 0;
5598 else
5599 exp_rtl = XEXP (DECL_RTL (exp), 0);
5601 break;
5603 case INDIRECT_REF:
5604 if (GET_CODE (x) == MEM
5605 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5606 get_alias_set (exp)))
5607 return 0;
5608 break;
5610 case CALL_EXPR:
5611 /* Assume that the call will clobber all hard registers and
5612 all of memory. */
5613 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5614 || GET_CODE (x) == MEM)
5615 return 0;
5616 break;
5618 case RTL_EXPR:
5619 /* If a sequence exists, we would have to scan every instruction
5620 in the sequence to see if it was safe. This is probably not
5621 worthwhile. */
5622 if (RTL_EXPR_SEQUENCE (exp))
5623 return 0;
5625 exp_rtl = RTL_EXPR_RTL (exp);
5626 break;
5628 case WITH_CLEANUP_EXPR:
5629 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5630 break;
5632 case CLEANUP_POINT_EXPR:
5633 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5635 case SAVE_EXPR:
5636 exp_rtl = SAVE_EXPR_RTL (exp);
5637 if (exp_rtl)
5638 break;
5640 /* If we've already scanned this, don't do it again. Otherwise,
5641 mark it as scanned and record it on save_expr_list so that the
5642 flag can be cleared when we are done. */
5643 if (TREE_PRIVATE (exp))
5644 return 1;
5646 TREE_PRIVATE (exp) = 1;
5647 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5649 TREE_PRIVATE (exp) = 0;
5650 return 0;
5653 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5654 return 1;
5656 case BIND_EXPR:
5657 /* The only operand we look at is operand 1. The rest aren't
5658 part of the expression. */
5659 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5661 case METHOD_CALL_EXPR:
5662 /* This takes an rtx argument, but shouldn't appear here. */
5663 abort ();
5665 default:
5666 break;
5669 /* If we have an rtx, we do not need to scan our operands. */
5670 if (exp_rtl)
5671 break;
5673 nops = first_rtl_op (TREE_CODE (exp));
5674 for (i = 0; i < nops; i++)
5675 if (TREE_OPERAND (exp, i) != 0
5676 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5677 return 0;
5679 /* If this is a language-specific tree code, it may require
5680 special handling. */
5681 if ((unsigned int) TREE_CODE (exp)
5682 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5683 && !(*lang_hooks.safe_from_p) (x, exp))
5684 return 0;
5687 /* If we have an rtl, find any enclosed object. Then see if we conflict
5688 with it. */
5689 if (exp_rtl)
5691 if (GET_CODE (exp_rtl) == SUBREG)
5693 exp_rtl = SUBREG_REG (exp_rtl);
5694 if (GET_CODE (exp_rtl) == REG
5695 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5696 return 0;
5699 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5700 are memory and they conflict. */
5701 return ! (rtx_equal_p (x, exp_rtl)
5702 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5703 && true_dependence (exp_rtl, GET_MODE (x), x,
5704 rtx_addr_varies_p)));
5707 /* If we reach here, it is safe. */
5708 return 1;
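/* Illustrative use (a sketch; the same pattern appears further down
   in this file when expanding binary operators):
     if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
       subtarget = 0;
   i.e. if evaluating operand 1 might clobber SUBTARGET, refuse to use
   SUBTARGET as an intermediate while operand 0 is being computed.  */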
5711 /* Subroutine of expand_expr: return rtx if EXP is a
5712 variable or parameter; else return 0. */
5714 static rtx
5715 var_rtx (exp)
5716 tree exp;
5718 STRIP_NOPS (exp);
5719 switch (TREE_CODE (exp))
5721 case PARM_DECL:
5722 case VAR_DECL:
5723 return DECL_RTL (exp);
5724 default:
5725 return 0;
5729 #ifdef MAX_INTEGER_COMPUTATION_MODE
5731 void
5732 check_max_integer_computation_mode (exp)
5733 tree exp;
5735 enum tree_code code;
5736 enum machine_mode mode;
5738 /* Strip any NOPs that don't change the mode. */
5739 STRIP_NOPS (exp);
5740 code = TREE_CODE (exp);
5742 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5743 if (code == NOP_EXPR
5744 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5745 return;
5747 /* First check the type of the overall operation. We need only look at
5748 unary, binary and relational operations. */
5749 if (TREE_CODE_CLASS (code) == '1'
5750 || TREE_CODE_CLASS (code) == '2'
5751 || TREE_CODE_CLASS (code) == '<')
5753 mode = TYPE_MODE (TREE_TYPE (exp));
5754 if (GET_MODE_CLASS (mode) == MODE_INT
5755 && mode > MAX_INTEGER_COMPUTATION_MODE)
5756 internal_error ("unsupported wide integer operation");
5759 /* Check operand of a unary op. */
5760 if (TREE_CODE_CLASS (code) == '1')
5762 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5763 if (GET_MODE_CLASS (mode) == MODE_INT
5764 && mode > MAX_INTEGER_COMPUTATION_MODE)
5765 internal_error ("unsupported wide integer operation");
5768 /* Check operands of a binary/comparison op. */
5769 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5771 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5772 if (GET_MODE_CLASS (mode) == MODE_INT
5773 && mode > MAX_INTEGER_COMPUTATION_MODE)
5774 internal_error ("unsupported wide integer operation");
5776 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5777 if (GET_MODE_CLASS (mode) == MODE_INT
5778 && mode > MAX_INTEGER_COMPUTATION_MODE)
5779 internal_error ("unsupported wide integer operation");
5782 #endif
5784 /* Return the highest power of two that EXP is known to be a multiple of.
5785 This is used in updating alignment of MEMs in array references. */
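/* Worked examples (illustrative only): for the constant 24 this
   returns 24 & -24 == 8, the lowest set bit; for (i * 24) it returns
   1 * 8 == 8, since an unknown factor defaults to 1; for (i * 24) + 16
   it returns MIN (8, 16) == 8.  An exact division by a power of two
   divides the factor (but never below 1), and any unrecognized tree
   code conservatively yields 1.  */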
5787 static HOST_WIDE_INT
5788 highest_pow2_factor (exp)
5789 tree exp;
5791 HOST_WIDE_INT c0, c1;
5793 switch (TREE_CODE (exp))
5795 case INTEGER_CST:
5796 /* If the integer is expressible in a HOST_WIDE_INT, we can find the
5797 lowest bit that's a one. If the result is zero, return
5798 BIGGEST_ALIGNMENT. We need to handle this case since we can find it
5799 in a COND_EXPR, a MIN_EXPR, or a MAX_EXPR. If the constant overflows,
5800 we have an erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5801 later ICE. */
5802 if (TREE_CONSTANT_OVERFLOW (exp)
5803 || integer_zerop (exp))
5804 return BIGGEST_ALIGNMENT;
5805 else if (host_integerp (exp, 0))
5807 c0 = tree_low_cst (exp, 0);
5808 c0 = c0 < 0 ? - c0 : c0;
5809 return c0 & -c0;
5811 break;
5813 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5814 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5815 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5816 return MIN (c0, c1);
5818 case MULT_EXPR:
5819 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5820 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5821 return c0 * c1;
5823 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5824 case CEIL_DIV_EXPR:
5825 if (integer_pow2p (TREE_OPERAND (exp, 1))
5826 && host_integerp (TREE_OPERAND (exp, 1), 1))
5828 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5829 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5830 return MAX (1, c0 / c1);
5832 break;
5834 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5835 case SAVE_EXPR: case WITH_RECORD_EXPR:
5836 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5838 case COMPOUND_EXPR:
5839 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5841 case COND_EXPR:
5842 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5843 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5844 return MIN (c0, c1);
5846 default:
5847 break;
5850 return 1;
5853 /* Return an object on the placeholder list that matches EXP, a
5854 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5855 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5856 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5857 is a location which initially points to a starting location in the
5858 placeholder list (zero means start of the list) and where a pointer into
5859 the placeholder list at which the object is found is placed. */
5861 tree
5862 find_placeholder (exp, plist)
5863 tree exp;
5864 tree *plist;
5866 tree type = TREE_TYPE (exp);
5867 tree placeholder_expr;
5869 for (placeholder_expr
5870 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5871 placeholder_expr != 0;
5872 placeholder_expr = TREE_CHAIN (placeholder_expr))
5874 tree need_type = TYPE_MAIN_VARIANT (type);
5875 tree elt;
5877 /* Find the outermost reference that is of the type we want. If none,
5878 see if any object has a type that is a pointer to the type we
5879 want. */
5880 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5881 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5882 || TREE_CODE (elt) == COND_EXPR)
5883 ? TREE_OPERAND (elt, 1)
5884 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5885 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5886 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5887 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5888 ? TREE_OPERAND (elt, 0) : 0))
5889 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5891 if (plist)
5892 *plist = placeholder_expr;
5893 return elt;
5896 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5897 elt
5898 = ((TREE_CODE (elt) == COMPOUND_EXPR
5899 || TREE_CODE (elt) == COND_EXPR)
5900 ? TREE_OPERAND (elt, 1)
5901 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5902 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5903 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5904 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5905 ? TREE_OPERAND (elt, 0) : 0))
5906 if (POINTER_TYPE_P (TREE_TYPE (elt))
5907 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5908 == need_type))
5910 if (plist)
5911 *plist = placeholder_expr;
5912 return build1 (INDIRECT_REF, need_type, elt);
5916 return 0;
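/* Illustrative sketch of the mechanism (assumed typical use, not taken
   from a particular front end): expanding WITH_RECORD_EXPR <size, rec>
   pushes REC onto placeholder_list; a PLACEHOLDER_EXPR of REC's type
   buried inside SIZE is then resolved by this function to REC itself,
   or to an INDIRECT_REF of a pointer to it, which is how
   self-referential type sizes get their missing object supplied.  */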
5919 /* expand_expr: generate code for computing expression EXP.
5920 An rtx for the computed value is returned. The value is never null.
5921 In the case of a void EXP, const0_rtx is returned.
5923 The value may be stored in TARGET if TARGET is nonzero.
5924 TARGET is just a suggestion; callers must assume that
5925 the rtx returned may not be the same as TARGET.
5927 If TARGET is CONST0_RTX, it means that the value will be ignored.
5929 If TMODE is not VOIDmode, it suggests generating the
5930 result in mode TMODE. But this is done only when convenient.
5931 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5932 TMODE is just a suggestion; callers must assume that
5933 the rtx returned may not have mode TMODE.
5935 Note that TARGET may have neither TMODE nor MODE. In that case, it
5936 probably will not be used.
5938 If MODIFIER is EXPAND_SUM then when EXP is an addition
5939 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5940 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5941 products as above, or REG or MEM, or constant.
5942 Ordinarily in such cases we would output mul or add instructions
5943 and then return a pseudo reg containing the sum.
5945 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5946 it also marks a label as absolutely required (it can't be dead).
5947 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5948 This is used for outputting expressions used in initializers.
5950 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5951 with a constant address even if that address is not normally legitimate.
5952 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
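/* A typical call, as used throughout this file (sketch only):
     op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
   The caller must be prepared for the result to be a REG, a MEM, or a
   constant; only the modifiers described above change that contract.  */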
5954 rtx
5955 expand_expr (exp, target, tmode, modifier)
5956 tree exp;
5957 rtx target;
5958 enum machine_mode tmode;
5959 enum expand_modifier modifier;
5961 rtx op0, op1, temp;
5962 tree type = TREE_TYPE (exp);
5963 int unsignedp = TREE_UNSIGNED (type);
5964 enum machine_mode mode;
5965 enum tree_code code = TREE_CODE (exp);
5966 optab this_optab;
5967 rtx subtarget, original_target;
5968 int ignore;
5969 tree context;
5971 /* Handle ERROR_MARK before anybody tries to access its type. */
5972 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5974 op0 = CONST0_RTX (tmode);
5975 if (op0 != 0)
5976 return op0;
5977 return const0_rtx;
5980 mode = TYPE_MODE (type);
5981 /* Use subtarget as the target for operand 0 of a binary operation. */
5982 subtarget = get_subtarget (target);
5983 original_target = target;
5984 ignore = (target == const0_rtx
5985 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5986 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5987 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
5988 && TREE_CODE (type) == VOID_TYPE));
5990 /* If we are going to ignore this result, we need only do something
5991 if there is a side-effect somewhere in the expression. If there
5992 is, short-circuit the most common cases here. Note that we must
5993 not call expand_expr with anything but const0_rtx in case this
5994 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
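/* For instance (illustrative): when a comparison such as "a < b" is
   used as a statement by itself, its value is not needed; the code
   below merely expands the two operands for their side effects and
   returns const0_rtx without computing the comparison.  */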
5996 if (ignore)
5998 if (! TREE_SIDE_EFFECTS (exp))
5999 return const0_rtx;
6001 /* Ensure we reference a volatile object even if value is ignored, but
6002 don't do this if all we are doing is taking its address. */
6003 if (TREE_THIS_VOLATILE (exp)
6004 && TREE_CODE (exp) != FUNCTION_DECL
6005 && mode != VOIDmode && mode != BLKmode
6006 && modifier != EXPAND_CONST_ADDRESS)
6008 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6009 if (GET_CODE (temp) == MEM)
6010 temp = copy_to_reg (temp);
6011 return const0_rtx;
6014 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6015 || code == INDIRECT_REF || code == BUFFER_REF)
6016 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6017 modifier);
6019 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6020 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6022 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6023 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6024 return const0_rtx;
6026 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6027 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6028 /* If the second operand has no side effects, just evaluate
6029 the first. */
6030 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6031 modifier);
6032 else if (code == BIT_FIELD_REF)
6034 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6035 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6036 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6037 return const0_rtx;
6040 target = 0;
6043 #ifdef MAX_INTEGER_COMPUTATION_MODE
6044 /* Only check stuff here if the mode we want is different from the mode
6045 of the expression; if it's the same, check_max_integer_computation_mode
6046 will handle it. Do we really need to check this stuff at all? */
6048 if (target
6049 && GET_MODE (target) != mode
6050 && TREE_CODE (exp) != INTEGER_CST
6051 && TREE_CODE (exp) != PARM_DECL
6052 && TREE_CODE (exp) != ARRAY_REF
6053 && TREE_CODE (exp) != ARRAY_RANGE_REF
6054 && TREE_CODE (exp) != COMPONENT_REF
6055 && TREE_CODE (exp) != BIT_FIELD_REF
6056 && TREE_CODE (exp) != INDIRECT_REF
6057 && TREE_CODE (exp) != CALL_EXPR
6058 && TREE_CODE (exp) != VAR_DECL
6059 && TREE_CODE (exp) != RTL_EXPR)
6061 enum machine_mode mode = GET_MODE (target);
6063 if (GET_MODE_CLASS (mode) == MODE_INT
6064 && mode > MAX_INTEGER_COMPUTATION_MODE)
6065 internal_error ("unsupported wide integer operation");
6068 if (tmode != mode
6069 && TREE_CODE (exp) != INTEGER_CST
6070 && TREE_CODE (exp) != PARM_DECL
6071 && TREE_CODE (exp) != ARRAY_REF
6072 && TREE_CODE (exp) != ARRAY_RANGE_REF
6073 && TREE_CODE (exp) != COMPONENT_REF
6074 && TREE_CODE (exp) != BIT_FIELD_REF
6075 && TREE_CODE (exp) != INDIRECT_REF
6076 && TREE_CODE (exp) != VAR_DECL
6077 && TREE_CODE (exp) != CALL_EXPR
6078 && TREE_CODE (exp) != RTL_EXPR
6079 && GET_MODE_CLASS (tmode) == MODE_INT
6080 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6081 internal_error ("unsupported wide integer operation");
6083 check_max_integer_computation_mode (exp);
6084 #endif
6086 /* If we are going to do cse, generate all results into pseudo registers
6087 since 1) that allows cse to find more things
6088 and 2) otherwise cse could produce an insn the machine
6089 cannot support. An exception is a CONSTRUCTOR into a multi-word
6090 MEM: storing directly into the MEM is much more likely to be efficient. */
6092 if (! cse_not_expected && mode != BLKmode && target
6093 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6094 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6095 target = subtarget;
6097 switch (code)
6099 case LABEL_DECL:
6101 tree function = decl_function_context (exp);
6102 /* Handle using a label in a containing function. */
6103 if (function != current_function_decl
6104 && function != inline_function_decl && function != 0)
6106 struct function *p = find_function_data (function);
6107 p->expr->x_forced_labels
6108 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6109 p->expr->x_forced_labels);
6111 else
6113 if (modifier == EXPAND_INITIALIZER)
6114 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6115 label_rtx (exp),
6116 forced_labels);
6119 temp = gen_rtx_MEM (FUNCTION_MODE,
6120 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6121 if (function != current_function_decl
6122 && function != inline_function_decl && function != 0)
6123 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6124 return temp;
6127 case PARM_DECL:
6128 if (DECL_RTL (exp) == 0)
6130 error_with_decl (exp, "prior parameter's size depends on `%s'");
6131 return CONST0_RTX (mode);
6134 /* ... fall through ... */
6136 case VAR_DECL:
6137 /* If a static var's type was incomplete when the decl was written,
6138 but the type is complete now, lay out the decl now. */
6139 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6140 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6142 rtx value = DECL_RTL_IF_SET (exp);
6144 layout_decl (exp, 0);
6146 /* If the RTL was already set, update its mode and memory
6147 attributes. */
6148 if (value != 0)
6150 PUT_MODE (value, DECL_MODE (exp));
6151 SET_DECL_RTL (exp, 0);
6152 set_mem_attributes (value, exp, 1);
6153 SET_DECL_RTL (exp, value);
6157 /* ... fall through ... */
6159 case FUNCTION_DECL:
6160 case RESULT_DECL:
6161 if (DECL_RTL (exp) == 0)
6162 abort ();
6164 /* Ensure the variable is marked as used even if it doesn't go through
6165 a parser. If it hasn't been used yet, write out an external
6166 definition. */
6167 if (! TREE_USED (exp))
6169 assemble_external (exp);
6170 TREE_USED (exp) = 1;
6173 /* Show we haven't gotten RTL for this yet. */
6174 temp = 0;
6176 /* Handle variables inherited from containing functions. */
6177 context = decl_function_context (exp);
6179 /* We treat inline_function_decl as an alias for the current function
6180 because that is the inline function whose vars, types, etc.
6181 are being merged into the current function.
6182 See expand_inline_function. */
6184 if (context != 0 && context != current_function_decl
6185 && context != inline_function_decl
6186 /* If var is static, we don't need a static chain to access it. */
6187 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6188 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6190 rtx addr;
6192 /* Mark as non-local and addressable. */
6193 DECL_NONLOCAL (exp) = 1;
6194 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6195 abort ();
6196 mark_addressable (exp);
6197 if (GET_CODE (DECL_RTL (exp)) != MEM)
6198 abort ();
6199 addr = XEXP (DECL_RTL (exp), 0);
6200 if (GET_CODE (addr) == MEM)
6201 addr
6202 = replace_equiv_address (addr,
6203 fix_lexical_addr (XEXP (addr, 0), exp));
6204 else
6205 addr = fix_lexical_addr (addr, exp);
6207 temp = replace_equiv_address (DECL_RTL (exp), addr);
6210 /* This is the case of an array whose size is to be determined
6211 from its initializer, while the initializer is still being parsed.
6212 See expand_decl. */
6214 else if (GET_CODE (DECL_RTL (exp)) == MEM
6215 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6216 temp = validize_mem (DECL_RTL (exp));
6218 /* If DECL_RTL is memory, we are in the normal case and either
6219 the address is not valid or it is not a register and -fforce-addr
6220 is specified, get the address into a register. */
6222 else if (GET_CODE (DECL_RTL (exp)) == MEM
6223 && modifier != EXPAND_CONST_ADDRESS
6224 && modifier != EXPAND_SUM
6225 && modifier != EXPAND_INITIALIZER
6226 && (! memory_address_p (DECL_MODE (exp),
6227 XEXP (DECL_RTL (exp), 0))
6228 || (flag_force_addr
6229 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6230 temp = replace_equiv_address (DECL_RTL (exp),
6231 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6233 /* If we got something, return it. But first, set the alignment
6234 if the address is a register. */
6235 if (temp != 0)
6237 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6238 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6240 return temp;
6243 /* If the mode of DECL_RTL does not match that of the decl, it
6244 must be a promoted value. We return a SUBREG of the wanted mode,
6245 but mark it so that we know that it was already extended. */
6247 if (GET_CODE (DECL_RTL (exp)) == REG
6248 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6250 /* Get the signedness used for this variable. Ensure we get the
6251 same mode we got when the variable was declared. */
6252 if (GET_MODE (DECL_RTL (exp))
6253 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6254 abort ();
6256 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6257 SUBREG_PROMOTED_VAR_P (temp) = 1;
6258 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6259 return temp;
6262 return DECL_RTL (exp);
6264 case INTEGER_CST:
6265 return immed_double_const (TREE_INT_CST_LOW (exp),
6266 TREE_INT_CST_HIGH (exp), mode);
6268 case CONST_DECL:
6269 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6271 case REAL_CST:
6272 /* If optimized, generate immediate CONST_DOUBLE
6273 which will be turned into memory by reload if necessary.
6275 We used to force a register so that loop.c could see it. But
6276 this does not allow gen_* patterns to perform optimizations with
6277 the constants. It also produces two insns in cases like "x = 1.0;".
6278 On most machines, floating-point constants are not permitted in
6279 many insns, so we'd end up copying it to a register in any case.
6281 Now, we do the copying in expand_binop, if appropriate. */
6282 return immed_real_const (exp);
6284 case COMPLEX_CST:
6285 case STRING_CST:
6286 if (! TREE_CST_RTL (exp))
6287 output_constant_def (exp, 1);
6289 /* TREE_CST_RTL probably contains a constant address.
6290 On RISC machines where a constant address isn't valid,
6291 make some insns to get that address into a register. */
6292 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6293 && modifier != EXPAND_CONST_ADDRESS
6294 && modifier != EXPAND_INITIALIZER
6295 && modifier != EXPAND_SUM
6296 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6297 || (flag_force_addr
6298 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6299 return replace_equiv_address (TREE_CST_RTL (exp),
6300 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6301 return TREE_CST_RTL (exp);
6303 case EXPR_WITH_FILE_LOCATION:
6305 rtx to_return;
6306 const char *saved_input_filename = input_filename;
6307 int saved_lineno = lineno;
6308 input_filename = EXPR_WFL_FILENAME (exp);
6309 lineno = EXPR_WFL_LINENO (exp);
6310 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6311 emit_line_note (input_filename, lineno);
6312 /* Possibly avoid switching back and forth here. */
6313 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6314 input_filename = saved_input_filename;
6315 lineno = saved_lineno;
6316 return to_return;
6319 case SAVE_EXPR:
6320 context = decl_function_context (exp);
6322 /* If this SAVE_EXPR was at global context, assume we are an
6323 initialization function and move it into our context. */
6324 if (context == 0)
6325 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6327 /* We treat inline_function_decl as an alias for the current function
6328 because that is the inline function whose vars, types, etc.
6329 are being merged into the current function.
6330 See expand_inline_function. */
6331 if (context == current_function_decl || context == inline_function_decl)
6332 context = 0;
6334 /* If this is non-local, handle it. */
6335 if (context)
6337 /* The following call just exists to abort if the context is
6338 not of a containing function. */
6339 find_function_data (context);
6341 temp = SAVE_EXPR_RTL (exp);
6342 if (temp && GET_CODE (temp) == REG)
6344 put_var_into_stack (exp);
6345 temp = SAVE_EXPR_RTL (exp);
6347 if (temp == 0 || GET_CODE (temp) != MEM)
6348 abort ();
6349 return
6350 replace_equiv_address (temp,
6351 fix_lexical_addr (XEXP (temp, 0), exp));
6353 if (SAVE_EXPR_RTL (exp) == 0)
6355 if (mode == VOIDmode)
6356 temp = const0_rtx;
6357 else
6358 temp = assign_temp (build_qualified_type (type,
6359 (TYPE_QUALS (type)
6360 | TYPE_QUAL_CONST)),
6361 3, 0, 0);
6363 SAVE_EXPR_RTL (exp) = temp;
6364 if (!optimize && GET_CODE (temp) == REG)
6365 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6366 save_expr_regs);
6368 /* If the mode of TEMP does not match that of the expression, it
6369 must be a promoted value. We pass store_expr a SUBREG of the
6370 wanted mode but mark it so that we know that it was already
6371 extended. Note that `unsignedp' was modified above in
6372 this case. */
6374 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6376 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6377 SUBREG_PROMOTED_VAR_P (temp) = 1;
6378 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6381 if (temp == const0_rtx)
6382 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6383 else
6384 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6386 TREE_USED (exp) = 1;
6389 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6390 must be a promoted value. We return a SUBREG of the wanted mode,
6391 but mark it so that we know that it was already extended. */
6393 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6394 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6396 /* Compute the signedness and make the proper SUBREG. */
6397 promote_mode (type, mode, &unsignedp, 0);
6398 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6399 SUBREG_PROMOTED_VAR_P (temp) = 1;
6400 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6401 return temp;
6404 return SAVE_EXPR_RTL (exp);
6406 case UNSAVE_EXPR:
6408 rtx temp;
6409 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6410 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6411 return temp;
6414 case PLACEHOLDER_EXPR:
6416 tree old_list = placeholder_list;
6417 tree placeholder_expr = 0;
6419 exp = find_placeholder (exp, &placeholder_expr);
6420 if (exp == 0)
6421 abort ();
6423 placeholder_list = TREE_CHAIN (placeholder_expr);
6424 temp = expand_expr (exp, original_target, tmode, modifier);
6425 placeholder_list = old_list;
6426 return temp;
6429 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6430 abort ();
6432 case WITH_RECORD_EXPR:
6433 /* Put the object on the placeholder list, expand our first operand,
6434 and pop the list. */
6435 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6436 placeholder_list);
6437 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6438 modifier);
6439 placeholder_list = TREE_CHAIN (placeholder_list);
6440 return target;
6442 case GOTO_EXPR:
6443 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6444 expand_goto (TREE_OPERAND (exp, 0));
6445 else
6446 expand_computed_goto (TREE_OPERAND (exp, 0));
6447 return const0_rtx;
6449 case EXIT_EXPR:
6450 expand_exit_loop_if_false (NULL,
6451 invert_truthvalue (TREE_OPERAND (exp, 0)));
6452 return const0_rtx;
6454 case LABELED_BLOCK_EXPR:
6455 if (LABELED_BLOCK_BODY (exp))
6456 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6457 /* Should perhaps use expand_label, but this is simpler and safer. */
6458 do_pending_stack_adjust ();
6459 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6460 return const0_rtx;
6462 case EXIT_BLOCK_EXPR:
6463 if (EXIT_BLOCK_RETURN (exp))
6464 sorry ("returned value in block_exit_expr");
6465 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6466 return const0_rtx;
6468 case LOOP_EXPR:
6469 push_temp_slots ();
6470 expand_start_loop (1);
6471 expand_expr_stmt (TREE_OPERAND (exp, 0));
6472 expand_end_loop ();
6473 pop_temp_slots ();
6475 return const0_rtx;
6477 case BIND_EXPR:
6479 tree vars = TREE_OPERAND (exp, 0);
6480 int vars_need_expansion = 0;
6482 /* Need to open a binding contour here because
6483 if there are any cleanups they must be contained here. */
6484 expand_start_bindings (2);
6486 /* Mark the corresponding BLOCK for output in its proper place. */
6487 if (TREE_OPERAND (exp, 2) != 0
6488 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6489 insert_block (TREE_OPERAND (exp, 2));
6491 /* If VARS have not yet been expanded, expand them now. */
6492 while (vars)
6494 if (!DECL_RTL_SET_P (vars))
6496 vars_need_expansion = 1;
6497 expand_decl (vars);
6499 expand_decl_init (vars);
6500 vars = TREE_CHAIN (vars);
6503 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6505 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6507 return temp;
6510 case RTL_EXPR:
6511 if (RTL_EXPR_SEQUENCE (exp))
6513 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6514 abort ();
6515 emit_insns (RTL_EXPR_SEQUENCE (exp));
6516 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6518 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6519 free_temps_for_rtl_expr (exp);
6520 return RTL_EXPR_RTL (exp);
6522 case CONSTRUCTOR:
6523 /* If we don't need the result, just ensure we evaluate any
6524 subexpressions. */
6525 if (ignore)
6527 tree elt;
6529 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6530 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6532 return const0_rtx;
6535 /* All elts simple constants => refer to a constant in memory. But
6536 if this is a non-BLKmode mode, let it store a field at a time
6537 since that should make a CONST_INT or CONST_DOUBLE when we
6538 fold. Likewise, if we have a target we can use, it is best to
6539 store directly into the target unless the type is large enough
6540 that memcpy will be used. If we are making an initializer and
6541 all operands are constant, put it in memory as well. */
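/* For example (illustrative only): the TREE_STATIC initializer of
   "static int a[3] = {1, 2, 3};" is emitted once as a constant block
   in memory, whereas a small non-static aggregate initializer falls
   through to store_constructor below and is built up field by field
   in TARGET.  */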
6542 else if ((TREE_STATIC (exp)
6543 && ((mode == BLKmode
6544 && ! (target != 0 && safe_from_p (target, exp, 1)))
6545 || TREE_ADDRESSABLE (exp)
6546 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6547 && (! MOVE_BY_PIECES_P
6548 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6549 TYPE_ALIGN (type)))
6550 && ! mostly_zeros_p (exp))))
6551 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6553 rtx constructor = output_constant_def (exp, 1);
6555 if (modifier != EXPAND_CONST_ADDRESS
6556 && modifier != EXPAND_INITIALIZER
6557 && modifier != EXPAND_SUM)
6558 constructor = validize_mem (constructor);
6560 return constructor;
6562 else
6564 /* Handle calls that pass values in multiple non-contiguous
6565 locations. The Irix 6 ABI has examples of this. */
6566 if (target == 0 || ! safe_from_p (target, exp, 1)
6567 || GET_CODE (target) == PARALLEL)
6568 target
6569 = assign_temp (build_qualified_type (type,
6570 (TYPE_QUALS (type)
6571 | (TREE_READONLY (exp)
6572 * TYPE_QUAL_CONST))),
6573 0, TREE_ADDRESSABLE (exp), 1);
6575 store_constructor (exp, target, 0,
6576 int_size_in_bytes (TREE_TYPE (exp)));
6577 return target;
6580 case INDIRECT_REF:
6582 tree exp1 = TREE_OPERAND (exp, 0);
6583 tree index;
6584 tree string = string_constant (exp1, &index);
6586 /* Try to optimize reads from const strings. */
6587 if (string
6588 && TREE_CODE (string) == STRING_CST
6589 && TREE_CODE (index) == INTEGER_CST
6590 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6591 && GET_MODE_CLASS (mode) == MODE_INT
6592 && GET_MODE_SIZE (mode) == 1
6593 && modifier != EXPAND_WRITE)
6594 return
6595 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6597 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6598 op0 = memory_address (mode, op0);
6599 temp = gen_rtx_MEM (mode, op0);
6600 set_mem_attributes (temp, exp, 0);
6602 /* If we are writing to this object and its type is a record with
6603 readonly fields, we must mark it as readonly so it will
6604 conflict with readonly references to those fields. */
6605 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6606 RTX_UNCHANGING_P (temp) = 1;
6608 return temp;
6611 case ARRAY_REF:
6612 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6613 abort ();
6616 tree array = TREE_OPERAND (exp, 0);
6617 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6618 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6619 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6620 HOST_WIDE_INT i;
6622 /* Optimize the special-case of a zero lower bound.
6624 We convert the low_bound to sizetype to avoid some problems
6625 with constant folding. (E.g. suppose the lower bound is 1,
6626 and its mode is QI. Without the conversion, (ARRAY
6627 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6628 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6630 if (! integer_zerop (low_bound))
6631 index = size_diffop (index, convert (sizetype, low_bound));
6633 /* Fold an expression like: "foo"[2].
6634 This is not done in fold so it won't happen inside &.
6635 Don't fold if this is for wide characters since it's too
6636 difficult to do correctly and this is a very rare case. */
6638 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6639 && TREE_CODE (array) == STRING_CST
6640 && TREE_CODE (index) == INTEGER_CST
6641 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6642 && GET_MODE_CLASS (mode) == MODE_INT
6643 && GET_MODE_SIZE (mode) == 1)
6644 return
6645 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6647 /* If this is a constant index into a constant array,
6648 just get the value from the array. Handle both the cases when
6649 we have an explicit constructor and when our operand is a variable
6650 that was declared const. */
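/* E.g. (illustrative only): given "static const int primes[3] = {2, 3, 5};",
   a use of primes[1] can be resolved right here to the constant 3,
   either from an explicit CONSTRUCTOR operand or, below, from the
   DECL_INITIAL of the readonly variable.  */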
6652 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6653 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6654 && TREE_CODE (index) == INTEGER_CST
6655 && 0 > compare_tree_int (index,
6656 list_length (CONSTRUCTOR_ELTS
6657 (TREE_OPERAND (exp, 0)))))
6659 tree elem;
6661 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6662 i = TREE_INT_CST_LOW (index);
6663 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6664 ;
6666 if (elem)
6667 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6668 modifier);
6671 else if (optimize >= 1
6672 && modifier != EXPAND_CONST_ADDRESS
6673 && modifier != EXPAND_INITIALIZER
6674 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6675 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6676 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6678 if (TREE_CODE (index) == INTEGER_CST)
6680 tree init = DECL_INITIAL (array);
6682 if (TREE_CODE (init) == CONSTRUCTOR)
6684 tree elem;
6686 for (elem = CONSTRUCTOR_ELTS (init);
6687 (elem
6688 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6689 elem = TREE_CHAIN (elem))
6690 ;
6692 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6693 return expand_expr (fold (TREE_VALUE (elem)), target,
6694 tmode, modifier);
6696 else if (TREE_CODE (init) == STRING_CST
6697 && 0 > compare_tree_int (index,
6698 TREE_STRING_LENGTH (init)))
6700 tree type = TREE_TYPE (TREE_TYPE (init));
6701 enum machine_mode mode = TYPE_MODE (type);
6703 if (GET_MODE_CLASS (mode) == MODE_INT
6704 && GET_MODE_SIZE (mode) == 1)
6705 return (GEN_INT
6706 (TREE_STRING_POINTER
6707 (init)[TREE_INT_CST_LOW (index)]));
6712 /* Fall through. */
6714 case COMPONENT_REF:
6715 case BIT_FIELD_REF:
6716 case ARRAY_RANGE_REF:
6717 /* If the operand is a CONSTRUCTOR, we can just extract the
6718 appropriate field if it is present. Don't do this if we have
6719 already written the data since we want to refer to that copy
6720 and varasm.c assumes that's what we'll do. */
6721 if (code == COMPONENT_REF
6722 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6723 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6725 tree elt;
6727 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6728 elt = TREE_CHAIN (elt))
6729 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6730 /* We can normally use the value of the field in the
6731 CONSTRUCTOR. However, if this is a bitfield in
6732 an integral mode that we can fit in a HOST_WIDE_INT,
6733 we must mask only the number of bits in the bitfield,
6734 since this is done implicitly by the constructor. If
6735 the bitfield does not meet either of those conditions,
6736 we can't do this optimization. */
6737 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6738 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6739 == MODE_INT)
6740 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6741 <= HOST_BITS_PER_WIDE_INT))))
6743 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6744 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6746 HOST_WIDE_INT bitsize
6747 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6749 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6751 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6752 op0 = expand_and (op0, op1, target);
6754 else
6756 enum machine_mode imode
6757 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6758 tree count
6759 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6760 0);
6762 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6763 target, 0);
6764 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6765 target, 0);
6769 return op0;
6774 enum machine_mode mode1;
6775 HOST_WIDE_INT bitsize, bitpos;
6776 tree offset;
6777 int volatilep = 0;
6778 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6779 &mode1, &unsignedp, &volatilep);
6780 rtx orig_op0;
6782 /* If we got back the original object, something is wrong. Perhaps
6783 we are evaluating an expression too early. In any event, don't
6784 infinitely recurse. */
6785 if (tem == exp)
6786 abort ();
6788 /* If TEM's type is a union of variable size, pass TARGET to the inner
6789 computation, since it will need a temporary and TARGET is known
6790 to suffice for that. This occurs in unchecked conversion in Ada. */
6792 orig_op0 = op0
6793 = expand_expr (tem,
6794 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6795 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6796 != INTEGER_CST)
6797 ? target : NULL_RTX),
6798 VOIDmode,
6799 (modifier == EXPAND_INITIALIZER
6800 || modifier == EXPAND_CONST_ADDRESS)
6801 ? modifier : EXPAND_NORMAL);
6803 /* If this is a constant, put it into a register if it is a
6804 legitimate constant and OFFSET is 0; otherwise put it in memory. */
6805 if (CONSTANT_P (op0))
6807 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6808 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6809 && offset == 0)
6810 op0 = force_reg (mode, op0);
6811 else
6812 op0 = validize_mem (force_const_mem (mode, op0));
6815 if (offset != 0)
6817 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6819 /* If this object is in a register, put it into memory.
6820 This case can't occur in C, but can in Ada if we have
6821 unchecked conversion of an expression from a scalar type to
6822 an array or record type. */
6823 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6824 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6826 /* If the operand is a SAVE_EXPR, we can deal with this by
6827 forcing the SAVE_EXPR into memory. */
6828 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6830 put_var_into_stack (TREE_OPERAND (exp, 0));
6831 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6833 else
6835 tree nt
6836 = build_qualified_type (TREE_TYPE (tem),
6837 (TYPE_QUALS (TREE_TYPE (tem))
6838 | TYPE_QUAL_CONST));
6839 rtx memloc = assign_temp (nt, 1, 1, 1);
6841 emit_move_insn (memloc, op0);
6842 op0 = memloc;
6846 if (GET_CODE (op0) != MEM)
6847 abort ();
6849 if (GET_MODE (offset_rtx) != ptr_mode)
6850 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6852 #ifdef POINTERS_EXTEND_UNSIGNED
6853 if (GET_MODE (offset_rtx) != Pmode)
6854 offset_rtx = convert_memory_address (Pmode, offset_rtx);
6855 #endif
6857 /* A constant address in OP0 can have VOIDmode; we must not try
6858 to call force_reg in that case, so avoid it here. */
6859 if (GET_CODE (op0) == MEM
6860 && GET_MODE (op0) == BLKmode
6861 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6862 && bitsize != 0
6863 && (bitpos % bitsize) == 0
6864 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6865 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6867 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6869 if (GET_CODE (XEXP (temp, 0)) == REG)
6870 op0 = temp;
6871 else
6872 op0 = (replace_equiv_address
6873 (op0,
6874 force_reg (GET_MODE (XEXP (temp, 0)),
6875 XEXP (temp, 0))));
6876 bitpos = 0;
6879 op0 = offset_address (op0, offset_rtx,
6880 highest_pow2_factor (offset));
6883 /* Don't forget about volatility even if this is a bitfield. */
6884 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6886 if (op0 == orig_op0)
6887 op0 = copy_rtx (op0);
6889 MEM_VOLATILE_P (op0) = 1;
6892 /* In cases where an aligned union has an unaligned object
6893 as a field, we might be extracting a BLKmode value from
6894 an integer-mode (e.g., SImode) object. Handle this case
6895 by doing the extract into an object as wide as the field
6896 (which we know to be the width of a basic mode), then
6897 storing into memory, and changing the mode to BLKmode. */
6898 if (mode1 == VOIDmode
6899 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6900 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6901 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6902 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6903 && modifier != EXPAND_CONST_ADDRESS
6904 && modifier != EXPAND_INITIALIZER)
6905 /* If the field isn't aligned enough to fetch as a memref,
6906 fetch it as a bit field. */
6907 || (mode1 != BLKmode
6908 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6909 && ((TYPE_ALIGN (TREE_TYPE (tem))
6910 < GET_MODE_ALIGNMENT (mode))
6911 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6912 /* If the type and the field are a constant size and the
6913 size of the type isn't the same size as the bitfield,
6914 we must use bitfield operations. */
6915 || (bitsize >= 0
6916 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6917 == INTEGER_CST)
6918 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6919 bitsize)))
6921 enum machine_mode ext_mode = mode;
6923 if (ext_mode == BLKmode
6924 && ! (target != 0 && GET_CODE (op0) == MEM
6925 && GET_CODE (target) == MEM
6926 && bitpos % BITS_PER_UNIT == 0))
6927 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6929 if (ext_mode == BLKmode)
6931 /* In this case, BITPOS must start at a byte boundary and
6932 TARGET, if specified, must be a MEM. */
6933 if (GET_CODE (op0) != MEM
6934 || (target != 0 && GET_CODE (target) != MEM)
6935 || bitpos % BITS_PER_UNIT != 0)
6936 abort ();
6938 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
6939 if (target == 0)
6940 target = assign_temp (type, 0, 1, 1);
6942 emit_block_move (target, op0,
6943 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6944 / BITS_PER_UNIT));
6946 return target;
6949 op0 = validize_mem (op0);
6951 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6952 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6954 op0 = extract_bit_field (op0, bitsize, bitpos,
6955 unsignedp, target, ext_mode, ext_mode,
6956 int_size_in_bytes (TREE_TYPE (tem)));
6958 /* If the result is a record type and BITSIZE is narrower than
6959 the mode of OP0, an integral mode, and this is a big endian
6960 machine, we must put the field into the high-order bits. */
6961 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6962 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6963 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6964 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6965 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6966 - bitsize),
6967 op0, 1);
6969 if (mode == BLKmode)
6971 rtx new = assign_temp (build_qualified_type
6972 (type_for_mode (ext_mode, 0),
6973 TYPE_QUAL_CONST), 0, 1, 1);
6975 emit_move_insn (new, op0);
6976 op0 = copy_rtx (new);
6977 PUT_MODE (op0, BLKmode);
6978 set_mem_attributes (op0, exp, 1);
6981 return op0;
6984 /* If the result is BLKmode, use that to access the object
6985 now as well. */
6986 if (mode == BLKmode)
6987 mode1 = BLKmode;
6989 /* Get a reference to just this component. */
6990 if (modifier == EXPAND_CONST_ADDRESS
6991 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6992 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
6993 else
6994 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6996 if (op0 == orig_op0)
6997 op0 = copy_rtx (op0);
6999 set_mem_attributes (op0, exp, 0);
7000 if (GET_CODE (XEXP (op0, 0)) == REG)
7001 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7003 MEM_VOLATILE_P (op0) |= volatilep;
7004 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7005 || modifier == EXPAND_CONST_ADDRESS
7006 || modifier == EXPAND_INITIALIZER)
7007 return op0;
7008 else if (target == 0)
7009 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7011 convert_move (target, op0, unsignedp);
7012 return target;
7015 case VTABLE_REF:
7017 rtx insn, before = get_last_insn (), vtbl_ref;
7019 /* Evaluate the interior expression. */
7020 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7021 tmode, modifier);
7023 /* Get or create an instruction off which to hang a note. */
7024 if (REG_P (subtarget))
7026 target = subtarget;
7027 insn = get_last_insn ();
7028 if (insn == before)
7029 abort ();
7030 if (! INSN_P (insn))
7031 insn = prev_nonnote_insn (insn);
7033 else
7035 target = gen_reg_rtx (GET_MODE (subtarget));
7036 insn = emit_move_insn (target, subtarget);
7039 /* Collect the data for the note. */
7040 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7041 vtbl_ref = plus_constant (vtbl_ref,
7042 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7043 /* Discard the initial CONST that was added. */
7044 vtbl_ref = XEXP (vtbl_ref, 0);
7046 REG_NOTES (insn)
7047 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7049 return target;
7052 /* Intended for a reference to a buffer of a file-object in Pascal.
7053 But it's not certain that a special tree code will really be
7054 necessary for these. INDIRECT_REF might work for them. */
7055 case BUFFER_REF:
7056 abort ();
7058 case IN_EXPR:
7060 /* Pascal set IN expression.
7062 Algorithm:
7063 rlo = set_low - (set_low%bits_per_word);
7064 the_word = set [ (index - rlo)/bits_per_word ];
7065 bit_index = index % bits_per_word;
7066 bitmask = 1 << bit_index;
7067 return !!(the_word & bitmask); */
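/* Worked instance of the pseudocode above (illustrative, assuming
   bits_per_word == 8): with set_low == 3 and index == 10,
   rlo == 3 - 3%8 == 0, the_word == set[(10 - 0) / 8] == set[1],
   bit_index == 10 % 8 == 2, and bitmask == 1 << 2 == 4.  */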
7069 tree set = TREE_OPERAND (exp, 0);
7070 tree index = TREE_OPERAND (exp, 1);
7071 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7072 tree set_type = TREE_TYPE (set);
7073 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7074 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7075 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7076 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7077 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7078 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7079 rtx setaddr = XEXP (setval, 0);
7080 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7081 rtx rlow;
7082 rtx diff, quo, rem, addr, bit, result;
7084 /* If domain is empty, answer is no. Likewise if index is constant
7085 and out of bounds. */
7086 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7087 && TREE_CODE (set_low_bound) == INTEGER_CST
7088 && tree_int_cst_lt (set_high_bound, set_low_bound))
7089 || (TREE_CODE (index) == INTEGER_CST
7090 && TREE_CODE (set_low_bound) == INTEGER_CST
7091 && tree_int_cst_lt (index, set_low_bound))
7092 || (TREE_CODE (set_high_bound) == INTEGER_CST
7093 && TREE_CODE (index) == INTEGER_CST
7094 && tree_int_cst_lt (set_high_bound, index))))
7095 return const0_rtx;
7097 if (target == 0)
7098 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7100 /* If we get here, we have to generate the code for both cases
7101 (in range and out of range). */
7103 op0 = gen_label_rtx ();
7104 op1 = gen_label_rtx ();
7106 if (! (GET_CODE (index_val) == CONST_INT
7107 && GET_CODE (lo_r) == CONST_INT))
7108 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7109 GET_MODE (index_val), iunsignedp, op1);
7111 if (! (GET_CODE (index_val) == CONST_INT
7112 && GET_CODE (hi_r) == CONST_INT))
7113 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7114 GET_MODE (index_val), iunsignedp, op1);
7116 /* Calculate the element number of bit zero in the first word
7117 of the set. */
7118 if (GET_CODE (lo_r) == CONST_INT)
7119 rlow = GEN_INT (INTVAL (lo_r)
7120 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7121 else
7122 rlow = expand_binop (index_mode, and_optab, lo_r,
7123 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7124 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7126 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7127 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7129 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7130 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7131 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7132 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7134 addr = memory_address (byte_mode,
7135 expand_binop (index_mode, add_optab, diff,
7136 setaddr, NULL_RTX, iunsignedp,
7137 OPTAB_LIB_WIDEN));
7139 /* Extract the bit we want to examine. */
7140 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7141 gen_rtx_MEM (byte_mode, addr),
7142 make_tree (TREE_TYPE (index), rem),
7143 NULL_RTX, 1);
7144 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7145 GET_MODE (target) == byte_mode ? target : 0,
7146 1, OPTAB_LIB_WIDEN);
7148 if (result != target)
7149 convert_move (target, result, 1);
7151 /* Output the code to handle the out-of-range case. */
7152 emit_jump (op0);
7153 emit_label (op1);
7154 emit_move_insn (target, const0_rtx);
7155 emit_label (op0);
7156 return target;
7159 case WITH_CLEANUP_EXPR:
7160 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7162 WITH_CLEANUP_EXPR_RTL (exp)
7163 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7164 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7166 /* That's it for this cleanup. */
7167 TREE_OPERAND (exp, 1) = 0;
7169 return WITH_CLEANUP_EXPR_RTL (exp);
7171 case CLEANUP_POINT_EXPR:
7173 /* Start a new binding layer that will keep track of all cleanup
7174 actions to be performed. */
7175 expand_start_bindings (2);
7177 target_temp_slot_level = temp_slot_level;
7179 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7180 /* If we're going to use this value, load it up now. */
7181 if (! ignore)
7182 op0 = force_not_mem (op0);
7183 preserve_temp_slots (op0);
7184 expand_end_bindings (NULL_TREE, 0, 0);
7186 return op0;
7188 case CALL_EXPR:
7189 /* Check for a built-in function. */
7190 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7191 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7192 == FUNCTION_DECL)
7193 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7195 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7196 == BUILT_IN_FRONTEND)
7197 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7198 else
7199 return expand_builtin (exp, target, subtarget, tmode, ignore);
7202 return expand_call (exp, target, ignore);
7204 case NON_LVALUE_EXPR:
7205 case NOP_EXPR:
7206 case CONVERT_EXPR:
7207 case REFERENCE_EXPR:
7208 if (TREE_OPERAND (exp, 0) == error_mark_node)
7209 return const0_rtx;
7211 if (TREE_CODE (type) == UNION_TYPE)
7213 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7215 /* If both input and output are BLKmode, this conversion isn't doing
7216 anything except possibly changing the memory attributes. */
7217 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7219 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7220 modifier);
7222 result = copy_rtx (result);
7223 set_mem_attributes (result, exp, 0);
7224 return result;
7227 if (target == 0)
7228 target = assign_temp (type, 0, 1, 1);
7230 if (GET_CODE (target) == MEM)
7231 /* Store data into beginning of memory target. */
7232 store_expr (TREE_OPERAND (exp, 0),
7233 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7235 else if (GET_CODE (target) == REG)
7236 /* Store this field into a union of the proper type. */
7237 store_field (target,
7238 MIN ((int_size_in_bytes (TREE_TYPE
7239 (TREE_OPERAND (exp, 0)))
7240 * BITS_PER_UNIT),
7241 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7242 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7243 VOIDmode, 0, type, 0);
7244 else
7245 abort ();
7247 /* Return the entire union. */
7248 return target;
7251 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7253 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7254 modifier);
7256 /* If the signedness of the conversion differs and OP0 is
7257 a promoted SUBREG, clear that indication since we now
7258 have to do the proper extension. */
7259 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7260 && GET_CODE (op0) == SUBREG)
7261 SUBREG_PROMOTED_VAR_P (op0) = 0;
7263 return op0;
7266 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7267 if (GET_MODE (op0) == mode)
7268 return op0;
7270 /* If OP0 is a constant, just convert it into the proper mode. */
7271 if (CONSTANT_P (op0))
7272 return
7273 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7274 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7276 if (modifier == EXPAND_INITIALIZER)
7277 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7279 if (target == 0)
7280 return
7281 convert_to_mode (mode, op0,
7282 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7283 else
7284 convert_move (target, op0,
7285 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7286 return target;
7288 case VIEW_CONVERT_EXPR:
7289 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7291 /* If the input and output modes are both the same, we are done.
7292 Otherwise, if neither mode is BLKmode and both are within a word, we
7293 can use gen_lowpart. If neither is true, make sure the operand is
7294 in memory and convert the MEM to the new mode. */
7295 if (TYPE_MODE (type) == GET_MODE (op0))
7296 ;
7297 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7298 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7299 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7300 op0 = gen_lowpart (TYPE_MODE (type), op0);
7301 else if (GET_CODE (op0) != MEM)
7303 /* If the operand is not a MEM, force it into memory. Since we
7304 are going to be changing the mode of the MEM, don't call
7305 force_const_mem for constants because we don't allow pool
7306 constants to change mode. */
7307 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7309 if (TREE_ADDRESSABLE (exp))
7310 abort ();
7312 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7313 target
7314 = assign_stack_temp_for_type
7315 (TYPE_MODE (inner_type),
7316 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7318 emit_move_insn (target, op0);
7319 op0 = target;
7322 /* At this point, OP0 is in the correct mode. If the output type is such
7323 that the operand is known to be aligned, indicate that it is.
7324 Otherwise, we need only be concerned about alignment for non-BLKmode
7325 results. */
7326 if (GET_CODE (op0) == MEM)
7328 op0 = copy_rtx (op0);
7330 if (TYPE_ALIGN_OK (type))
7331 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7332 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7333 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7335 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7336 HOST_WIDE_INT temp_size
7337 = MAX (int_size_in_bytes (inner_type),
7338 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7339 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7340 temp_size, 0, type);
7341 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7343 if (TREE_ADDRESSABLE (exp))
7344 abort ();
7346 if (GET_MODE (op0) == BLKmode)
7347 emit_block_move (new_with_op0_mode, op0,
7348 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7349 else
7350 emit_move_insn (new_with_op0_mode, op0);
7352 op0 = new;
7355 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7358 return op0;
7360 case PLUS_EXPR:
7361 /* We come here from MINUS_EXPR when the second operand is a
7362 constant. */
7363 plus_expr:
7364 this_optab = ! unsignedp && flag_trapv
7365 && (GET_MODE_CLASS (mode) == MODE_INT)
7366 ? addv_optab : add_optab;
7368 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7369 something else, make sure we add the register to the constant and
7370 then to the other thing. This case can occur during strength
7371 reduction and doing it this way will produce better code if the
7372 frame pointer or argument pointer is eliminated.
7374 fold-const.c will ensure that the constant is always in the inner
7375 PLUS_EXPR, so the only case we need to do anything about is if
7376 sp, ap, or fp is our second argument, in which case we must swap
7377 the innermost first argument and our second argument. */
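/* Illustrative rearrangement (sketch): given (x + 4) + fp, the swap
   below produces (fp + 4) + x, so that the register-plus-constant
   part can later fold directly into an address.  */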
7379 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7380 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7381 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7382 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7383 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7384 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7386 tree t = TREE_OPERAND (exp, 1);
7388 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7389 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7392 /* If the result is to be ptr_mode and we are adding an integer to
7393 something, we might be forming a constant. So try to use
7394 plus_constant. If it produces a sum and we can't accept it,
7395 use force_operand. This allows P = &ARR[const] to generate
7396 efficient code on machines where a SYMBOL_REF is not a valid
7397 address.
7399 If this is an EXPAND_SUM call, always return the sum. */
7400 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7401 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7403 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7404 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7405 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7407 rtx constant_part;
7409 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7410 EXPAND_SUM);
7411 /* Use immed_double_const to ensure that the constant is
7412 truncated according to the mode of OP1, then sign extended
7413 to a HOST_WIDE_INT. Using the constant directly can result
7414 in non-canonical RTL in a 64x32 cross compile. */
7415 constant_part
7416 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7417 (HOST_WIDE_INT) 0,
7418 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7419 op1 = plus_constant (op1, INTVAL (constant_part));
7420 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7421 op1 = force_operand (op1, target);
7422 return op1;
7425 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7426 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7427 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7429 rtx constant_part;
7431 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7432 EXPAND_SUM);
7433 if (! CONSTANT_P (op0))
7435 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7436 VOIDmode, modifier);
7437 /* Don't go to both_summands if modifier
7438 says it's not right to return a PLUS. */
7439 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7440 goto binop2;
7441 goto both_summands;
7443 /* Use immed_double_const to ensure that the constant is
7444 truncated according to the mode of OP0, then sign extended
7445 to a HOST_WIDE_INT. Using the constant directly can result
7446 in non-canonical RTL in a 64x32 cross compile. */
7447 constant_part
7448 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7449 (HOST_WIDE_INT) 0,
7450 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7451 op0 = plus_constant (op0, INTVAL (constant_part));
7452 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7453 op0 = force_operand (op0, target);
7454 return op0;
7458 /* No sense saving up arithmetic to be done
7459 if it's all in the wrong mode to form part of an address.
7460 And force_operand won't know whether to sign-extend or
7461 zero-extend. */
7462 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7463 || mode != ptr_mode)
7464 goto binop;
7466 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7467 subtarget = 0;
7469 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7470 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7472 both_summands:
7473 /* Make sure any term that's a sum with a constant comes last. */
7474 if (GET_CODE (op0) == PLUS
7475 && CONSTANT_P (XEXP (op0, 1)))
7477 temp = op0;
7478 op0 = op1;
7479 op1 = temp;
7481 /* If adding to a sum including a constant,
7482 associate it to put the constant outside. */
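	 /* For example, adding (a + 4) and (b + 8) ends up here with the
	    variable parts gathered into OP0 and the combined constant in
	    OP1, so the final sum is formed as (a + b) + 12.  */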
7483 if (GET_CODE (op1) == PLUS
7484 && CONSTANT_P (XEXP (op1, 1)))
7486 rtx constant_term = const0_rtx;
7488 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7489 if (temp != 0)
7490 op0 = temp;
7491 /* Ensure that MULT comes first if there is one. */
7492 else if (GET_CODE (op0) == MULT)
7493 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7494 else
7495 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7497 /* Let's also eliminate constants from op0 if possible. */
7498 op0 = eliminate_constant_term (op0, &constant_term);
7500 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7501 their sum should be a constant. Form it into OP1, since the
7502 result we want will then be OP0 + OP1. */
7504 temp = simplify_binary_operation (PLUS, mode, constant_term,
7505 XEXP (op1, 1));
7506 if (temp != 0)
7507 op1 = temp;
7508 else
7509 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7512 /* Put a constant term last and put a multiplication first. */
7513 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7514 temp = op1, op1 = op0, op0 = temp;
7516 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7517 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7519 case MINUS_EXPR:
7520 /* For initializers, we are allowed to return a MINUS of two
7521 symbolic constants. Here we handle all cases when both operands
7522 are constant. */
7523 /* Handle difference of two symbolic constants,
7524 for the sake of an initializer. */
7525 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7526 && really_constant_p (TREE_OPERAND (exp, 0))
7527 && really_constant_p (TREE_OPERAND (exp, 1)))
7529 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7530 modifier);
7531 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7532 modifier);
7534 /* If the last operand is a CONST_INT, use plus_constant of
7535 the negated constant. Else make the MINUS. */
7536 if (GET_CODE (op1) == CONST_INT)
7537 return plus_constant (op0, - INTVAL (op1));
7538 else
7539 return gen_rtx_MINUS (mode, op0, op1);
7541 /* Convert A - const to A + (-const). */
7542 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7544 tree negated = fold (build1 (NEGATE_EXPR, type,
7545 TREE_OPERAND (exp, 1)));
7547 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7548 /* If we can't negate the constant in TYPE, leave it alone and
7549 expand_binop will negate it for us. We used to try to do it
7550 here in the signed version of TYPE, but that doesn't work
7551 on POINTER_TYPEs. */;
7552 else
7554 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7555 goto plus_expr;
7558 this_optab = ! unsignedp && flag_trapv
7559 && (GET_MODE_CLASS(mode) == MODE_INT)
7560 ? subv_optab : sub_optab;
7561 goto binop;
7563 case MULT_EXPR:
7564 /* If first operand is constant, swap them.
7565 Thus the following special case checks need only
7566 check the second operand. */
7567 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7569 tree t1 = TREE_OPERAND (exp, 0);
7570 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7571 TREE_OPERAND (exp, 1) = t1;
7574 /* Attempt to return something suitable for generating an
7575 indexed address, for machines that support that. */
7577 if (modifier == EXPAND_SUM && mode == ptr_mode
7578 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7579 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7581 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7582 EXPAND_SUM);
7584 /* Apply distributive law if OP0 is x+c. */
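	  /* E.g. (x + 4) * 12 is returned as x*12 + 48, a shape that can
	     feed directly into an indexed address.  */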
7585 if (GET_CODE (op0) == PLUS
7586 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7587 return
7588 gen_rtx_PLUS
7589 (mode,
7590 gen_rtx_MULT
7591 (mode, XEXP (op0, 0),
7592 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7593 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7594 * INTVAL (XEXP (op0, 1))));
7596 if (GET_CODE (op0) != REG)
7597 op0 = force_operand (op0, NULL_RTX);
7598 if (GET_CODE (op0) != REG)
7599 op0 = copy_to_mode_reg (mode, op0);
7601 return
7602 gen_rtx_MULT (mode, op0,
7603 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7606 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7607 subtarget = 0;
7609 /* Check for multiplying things that have been extended
7610 from a narrower type. If this machine supports multiplying
7611 in that narrower type with a result in the desired type,
7612 do it that way, and avoid the explicit type-conversion. */
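	 /* Illustrative case (assuming 32-bit int and a target that has the
	    widening pattern): (long long) i * (long long) j, with int i and
	    j, can then use a single 32x32->64 multiply instead of extending
	    both operands and doing a full 64-bit multiply.  */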
7613 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7614 && TREE_CODE (type) == INTEGER_TYPE
7615 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7616 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7617 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7618 && int_fits_type_p (TREE_OPERAND (exp, 1),
7619 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7620 /* Don't use a widening multiply if a shift will do. */
7621 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7622 > HOST_BITS_PER_WIDE_INT)
7623 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7625 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7626 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7628 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7629 /* If both operands are extended, they must either both
7630 be zero-extended or both be sign-extended. */
7631 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7633 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7635 enum machine_mode innermode
7636 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7637 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7638 ? smul_widen_optab : umul_widen_optab);
7639 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7640 ? umul_widen_optab : smul_widen_optab);
7641 if (mode == GET_MODE_WIDER_MODE (innermode))
7643 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7645 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7646 NULL_RTX, VOIDmode, 0);
7647 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7648 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7649 VOIDmode, 0);
7650 else
7651 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7652 NULL_RTX, VOIDmode, 0);
7653 goto binop2;
7655 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7656 && innermode == word_mode)
7658 rtx htem;
7659 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7660 NULL_RTX, VOIDmode, 0);
7661 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7662 op1 = convert_modes (innermode, mode,
7663 expand_expr (TREE_OPERAND (exp, 1),
7664 NULL_RTX, VOIDmode, 0),
7665 unsignedp);
7666 else
7667 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7668 NULL_RTX, VOIDmode, 0);
7669 temp = expand_binop (mode, other_optab, op0, op1, target,
7670 unsignedp, OPTAB_LIB_WIDEN);
7671 htem = expand_mult_highpart_adjust (innermode,
7672 gen_highpart (innermode, temp),
7673 op0, op1,
7674 gen_highpart (innermode, temp),
7675 unsignedp);
7676 emit_move_insn (gen_highpart (innermode, temp), htem);
7677 return temp;
7681 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7682 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7683 return expand_mult (mode, op0, op1, target, unsignedp);
7685 case TRUNC_DIV_EXPR:
7686 case FLOOR_DIV_EXPR:
7687 case CEIL_DIV_EXPR:
7688 case ROUND_DIV_EXPR:
7689 case EXACT_DIV_EXPR:
7690 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7691 subtarget = 0;
7692 /* Possible optimization: compute the dividend with EXPAND_SUM
7693 then if the divisor is constant can optimize the case
7694 where some terms of the dividend have coeffs divisible by it. */
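	 /* (As a sketch of that note: with an exact division such as
	    (8*a + b) / 4, the 8*a term has a coefficient divisible by the
	    constant divisor, so the quotient could be formed as 2*a + b/4;
	    the other rounding modes would need the operand signs checked.)  */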
7695 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7696 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7697 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7699 case RDIV_EXPR:
7700 /* Emit a/b as a*(1/b). Later, CSE may be able to reuse the reciprocal,
7701 saving an expensive divide. If not, combine will rebuild the original
7702 computation. */
7703 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7704 && !real_onep (TREE_OPERAND (exp, 0)))
7705 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7706 build (RDIV_EXPR, type,
7707 build_real (type, dconst1),
7708 TREE_OPERAND (exp, 1))),
7709 target, tmode, unsignedp);
7710 this_optab = sdiv_optab;
7711 goto binop;
7713 case TRUNC_MOD_EXPR:
7714 case FLOOR_MOD_EXPR:
7715 case CEIL_MOD_EXPR:
7716 case ROUND_MOD_EXPR:
7717 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7718 subtarget = 0;
7719 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7720 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7721 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7723 case FIX_ROUND_EXPR:
7724 case FIX_FLOOR_EXPR:
7725 case FIX_CEIL_EXPR:
7726 abort (); /* Not used for C. */
7728 case FIX_TRUNC_EXPR:
7729 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7730 if (target == 0)
7731 target = gen_reg_rtx (mode);
7732 expand_fix (target, op0, unsignedp);
7733 return target;
7735 case FLOAT_EXPR:
7736 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7737 if (target == 0)
7738 target = gen_reg_rtx (mode);
7739 /* expand_float can't figure out what to do if FROM has VOIDmode.
7740 So give it the correct mode. With -O, cse will optimize this. */
7741 if (GET_MODE (op0) == VOIDmode)
7742 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7743 op0);
7744 expand_float (target, op0,
7745 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7746 return target;
7748 case NEGATE_EXPR:
7749 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7750 temp = expand_unop (mode,
7751 ! unsignedp && flag_trapv
7752 && (GET_MODE_CLASS(mode) == MODE_INT)
7753 ? negv_optab : neg_optab, op0, target, 0);
7754 if (temp == 0)
7755 abort ();
7756 return temp;
7758 case ABS_EXPR:
7759 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7761 /* Handle complex values specially. */
7762 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7763 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7764 return expand_complex_abs (mode, op0, target, unsignedp);
7766 /* Unsigned abs is simply the operand. Testing here means we don't
7767 risk generating incorrect code below. */
7768 if (TREE_UNSIGNED (type))
7769 return op0;
7771 return expand_abs (mode, op0, target, unsignedp,
7772 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7774 case MAX_EXPR:
7775 case MIN_EXPR:
7776 target = original_target;
7777 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7778 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7779 || GET_MODE (target) != mode
7780 || (GET_CODE (target) == REG
7781 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7782 target = gen_reg_rtx (mode);
7783 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7784 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7786 /* First try to do it with a special MIN or MAX instruction.
7787 If that does not win, use a conditional jump to select the proper
7788 value. */
7789 this_optab = (TREE_UNSIGNED (type)
7790 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7791 : (code == MIN_EXPR ? smin_optab : smax_optab));
7793 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7794 OPTAB_WIDEN);
7795 if (temp != 0)
7796 return temp;
7798 /* At this point, a MEM target is no longer useful; we will get better
7799 code without it. */
7801 if (GET_CODE (target) == MEM)
7802 target = gen_reg_rtx (mode);
7804 if (target != op0)
7805 emit_move_insn (target, op0);
7807 op0 = gen_label_rtx ();
7809 /* If this mode is an integer too wide to compare properly,
7810 compare word by word. Rely on cse to optimize constant cases. */
7811 if (GET_MODE_CLASS (mode) == MODE_INT
7812 && ! can_compare_p (GE, mode, ccp_jump))
7814 if (code == MAX_EXPR)
7815 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7816 target, op1, NULL_RTX, op0);
7817 else
7818 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7819 op1, target, NULL_RTX, op0);
7821 else
7823 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7824 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7825 unsignedp, mode, NULL_RTX, NULL_RTX,
7826 op0);
7828 emit_move_insn (target, op1);
7829 emit_label (op0);
7830 return target;
7832 case BIT_NOT_EXPR:
7833 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7834 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7835 if (temp == 0)
7836 abort ();
7837 return temp;
7839 case FFS_EXPR:
7840 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7841 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7842 if (temp == 0)
7843 abort ();
7844 return temp;
7846 /* ??? Can optimize bitwise operations with one arg constant.
7847 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7848 and (a bitwise1 b) bitwise2 b (etc)
7849 but that is probably not worth while. */
7851 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7852 boolean values when we want in all cases to compute both of them. In
7853 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7854 as actual zero-or-1 values and then bitwise anding. In cases where
7855 there cannot be any side effects, better code would be made by
7856 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7857 how to recognize those cases. */
7859 case TRUTH_AND_EXPR:
7860 case BIT_AND_EXPR:
7861 this_optab = and_optab;
7862 goto binop;
7864 case TRUTH_OR_EXPR:
7865 case BIT_IOR_EXPR:
7866 this_optab = ior_optab;
7867 goto binop;
7869 case TRUTH_XOR_EXPR:
7870 case BIT_XOR_EXPR:
7871 this_optab = xor_optab;
7872 goto binop;
7874 case LSHIFT_EXPR:
7875 case RSHIFT_EXPR:
7876 case LROTATE_EXPR:
7877 case RROTATE_EXPR:
7878 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7879 subtarget = 0;
7880 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7881 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7882 unsignedp);
7884 /* Could determine the answer when only additive constants differ. Also,
7885 the addition of one can be handled by changing the condition. */
7886 case LT_EXPR:
7887 case LE_EXPR:
7888 case GT_EXPR:
7889 case GE_EXPR:
7890 case EQ_EXPR:
7891 case NE_EXPR:
7892 case UNORDERED_EXPR:
7893 case ORDERED_EXPR:
7894 case UNLT_EXPR:
7895 case UNLE_EXPR:
7896 case UNGT_EXPR:
7897 case UNGE_EXPR:
7898 case UNEQ_EXPR:
7899 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7900 if (temp != 0)
7901 return temp;
7903 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7904 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7905 && original_target
7906 && GET_CODE (original_target) == REG
7907 && (GET_MODE (original_target)
7908 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7910 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7911 VOIDmode, 0);
7913 if (temp != original_target)
7914 temp = copy_to_reg (temp);
7916 op1 = gen_label_rtx ();
7917 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7918 GET_MODE (temp), unsignedp, op1);
7919 emit_move_insn (temp, const1_rtx);
7920 emit_label (op1);
7921 return temp;
7924 /* If no set-flag instruction, must generate a conditional
7925 store into a temporary variable. Drop through
7926 and handle this like && and ||. */
7928 case TRUTH_ANDIF_EXPR:
7929 case TRUTH_ORIF_EXPR:
7930 if (! ignore
7931 && (target == 0 || ! safe_from_p (target, exp, 1)
7932 /* Make sure we don't have a hard reg (such as function's return
7933 value) live across basic blocks, if not optimizing. */
7934 || (!optimize && GET_CODE (target) == REG
7935 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7936 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7938 if (target)
7939 emit_clr_insn (target);
7941 op1 = gen_label_rtx ();
7942 jumpifnot (exp, op1);
7944 if (target)
7945 emit_0_to_1_insn (target);
7947 emit_label (op1);
7948 return ignore ? const0_rtx : target;
7950 case TRUTH_NOT_EXPR:
7951 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7952 /* The parser is careful to generate TRUTH_NOT_EXPR
7953 only with operands that are always zero or one. */
7954 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7955 target, 1, OPTAB_LIB_WIDEN);
7956 if (temp == 0)
7957 abort ();
7958 return temp;
7960 case COMPOUND_EXPR:
7961 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7962 emit_queue ();
7963 return expand_expr (TREE_OPERAND (exp, 1),
7964 (ignore ? const0_rtx : target),
7965 VOIDmode, 0);
7967 case COND_EXPR:
7968 /* If we would have a "singleton" (see below) were it not for a
7969 conversion in each arm, bring that conversion back out. */
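	 /* For example, c ? (int) (x + 1) : (int) x is rewritten here as
	    (int) (c ? x + 1 : x), exposing the "singleton" form that the
	    code further down handles cheaply.  */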
7970 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7971 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7972 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7973 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7975 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7976 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7978 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7979 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7980 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7981 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7982 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7983 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7984 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7985 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7986 return expand_expr (build1 (NOP_EXPR, type,
7987 build (COND_EXPR, TREE_TYPE (iftrue),
7988 TREE_OPERAND (exp, 0),
7989 iftrue, iffalse)),
7990 target, tmode, modifier);
7994 /* Note that COND_EXPRs whose type is a structure or union
7995 are required to be constructed to contain assignments of
7996 a temporary variable, so that we can evaluate them here
7997 for side effect only. If type is void, we must do likewise. */
7999 /* If an arm of the branch requires a cleanup,
8000 only that cleanup is performed. */
8002 tree singleton = 0;
8003 tree binary_op = 0, unary_op = 0;
8005 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8006 convert it to our mode, if necessary. */
8007 if (integer_onep (TREE_OPERAND (exp, 1))
8008 && integer_zerop (TREE_OPERAND (exp, 2))
8009 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8011 if (ignore)
8013 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8014 modifier);
8015 return const0_rtx;
8018 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8019 if (GET_MODE (op0) == mode)
8020 return op0;
8022 if (target == 0)
8023 target = gen_reg_rtx (mode);
8024 convert_move (target, op0, unsignedp);
8025 return target;
8028 /* Check for X ? A + B : A. If we have this, we can copy A to the
8029 output and conditionally add B. Similarly for unary operations.
8030 Don't do this if X has side-effects because those side effects
8031 might affect A or B and the "?" operation is a sequence point in
8032 ANSI. (operand_equal_p tests for side effects.) */
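	 /* Concretely, x ? a + b : a can then be compiled roughly as
	    temp = a; if (x) temp += b;  instead of evaluating both arms
	    and selecting one.  */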
8034 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8035 && operand_equal_p (TREE_OPERAND (exp, 2),
8036 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8037 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8038 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8039 && operand_equal_p (TREE_OPERAND (exp, 1),
8040 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8041 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8042 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8043 && operand_equal_p (TREE_OPERAND (exp, 2),
8044 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8045 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8046 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8047 && operand_equal_p (TREE_OPERAND (exp, 1),
8048 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8049 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8051 /* If we are not to produce a result, we have no target. Otherwise,
8052 if a target was specified use it; it will not be used as an
8053 intermediate target unless it is safe. If no target, use a
8054 temporary. */
8056 if (ignore)
8057 temp = 0;
8058 else if (original_target
8059 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8060 || (singleton && GET_CODE (original_target) == REG
8061 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8062 && original_target == var_rtx (singleton)))
8063 && GET_MODE (original_target) == mode
8064 #ifdef HAVE_conditional_move
8065 && (! can_conditionally_move_p (mode)
8066 || GET_CODE (original_target) == REG
8067 || TREE_ADDRESSABLE (type))
8068 #endif
8069 && (GET_CODE (original_target) != MEM
8070 || TREE_ADDRESSABLE (type)))
8071 temp = original_target;
8072 else if (TREE_ADDRESSABLE (type))
8073 abort ();
8074 else
8075 temp = assign_temp (type, 0, 0, 1);
8077 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8078 do the test of X as a store-flag operation, do this as
8079 A + ((X != 0) << log C). Similarly for other simple binary
8080 operators. Only do for C == 1 if BRANCH_COST is low. */
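	 /* E.g. with C == 8, x ? a + 8 : a becomes a + ((x != 0) << 3),
	    avoiding a branch whenever X can be computed as a store-flag.  */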
8081 if (temp && singleton && binary_op
8082 && (TREE_CODE (binary_op) == PLUS_EXPR
8083 || TREE_CODE (binary_op) == MINUS_EXPR
8084 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8085 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8086 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8087 : integer_onep (TREE_OPERAND (binary_op, 1)))
8088 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8090 rtx result;
8091 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8092 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8093 ? addv_optab : add_optab)
8094 : TREE_CODE (binary_op) == MINUS_EXPR
8095 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8096 ? subv_optab : sub_optab)
8097 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8098 : xor_optab);
8100 /* If we had X ? A : A + 1, do this as A + (X == 0).
8102 We have to invert the truth value here and then put it
8103 back later if do_store_flag fails. We cannot simply copy
8104 TREE_OPERAND (exp, 0) to another variable and modify that
8105 because invert_truthvalue can modify the tree pointed to
8106 by its argument. */
8107 if (singleton == TREE_OPERAND (exp, 1))
8108 TREE_OPERAND (exp, 0)
8109 = invert_truthvalue (TREE_OPERAND (exp, 0));
8111 result = do_store_flag (TREE_OPERAND (exp, 0),
8112 (safe_from_p (temp, singleton, 1)
8113 ? temp : NULL_RTX),
8114 mode, BRANCH_COST <= 1);
8116 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8117 result = expand_shift (LSHIFT_EXPR, mode, result,
8118 build_int_2 (tree_log2
8119 (TREE_OPERAND
8120 (binary_op, 1)),
8122 (safe_from_p (temp, singleton, 1)
8123 ? temp : NULL_RTX), 0);
8125 if (result)
8127 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8128 return expand_binop (mode, boptab, op1, result, temp,
8129 unsignedp, OPTAB_LIB_WIDEN);
8131 else if (singleton == TREE_OPERAND (exp, 1))
8132 TREE_OPERAND (exp, 0)
8133 = invert_truthvalue (TREE_OPERAND (exp, 0));
8136 do_pending_stack_adjust ();
8137 NO_DEFER_POP;
8138 op0 = gen_label_rtx ();
8140 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8142 if (temp != 0)
8144 /* If the target conflicts with the other operand of the
8145 binary op, we can't use it. Also, we can't use the target
8146 if it is a hard register, because evaluating the condition
8147 might clobber it. */
8148 if ((binary_op
8149 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8150 || (GET_CODE (temp) == REG
8151 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8152 temp = gen_reg_rtx (mode);
8153 store_expr (singleton, temp, 0);
8155 else
8156 expand_expr (singleton,
8157 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8158 if (singleton == TREE_OPERAND (exp, 1))
8159 jumpif (TREE_OPERAND (exp, 0), op0);
8160 else
8161 jumpifnot (TREE_OPERAND (exp, 0), op0);
8163 start_cleanup_deferral ();
8164 if (binary_op && temp == 0)
8165 /* Just touch the other operand. */
8166 expand_expr (TREE_OPERAND (binary_op, 1),
8167 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8168 else if (binary_op)
8169 store_expr (build (TREE_CODE (binary_op), type,
8170 make_tree (type, temp),
8171 TREE_OPERAND (binary_op, 1)),
8172 temp, 0);
8173 else
8174 store_expr (build1 (TREE_CODE (unary_op), type,
8175 make_tree (type, temp)),
8176 temp, 0);
8177 op1 = op0;
8179 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8180 comparison operator. If we have one of these cases, set the
8181 output to A, branch on A (cse will merge these two references),
8182 then set the output to FOO. */
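	 /* For instance, a > 0 ? a : foo stores `a' into the result, jumps
	    past the else arm when a > 0 holds, and otherwise falls through
	    to store `foo'; cse then merges the two references to `a'.  */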
8183 else if (temp
8184 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8185 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8186 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8187 TREE_OPERAND (exp, 1), 0)
8188 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8189 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8190 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8192 if (GET_CODE (temp) == REG
8193 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8194 temp = gen_reg_rtx (mode);
8195 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8196 jumpif (TREE_OPERAND (exp, 0), op0);
8198 start_cleanup_deferral ();
8199 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8200 op1 = op0;
8202 else if (temp
8203 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8204 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8205 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8206 TREE_OPERAND (exp, 2), 0)
8207 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8208 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8209 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8211 if (GET_CODE (temp) == REG
8212 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8213 temp = gen_reg_rtx (mode);
8214 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8215 jumpifnot (TREE_OPERAND (exp, 0), op0);
8217 start_cleanup_deferral ();
8218 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8219 op1 = op0;
8221 else
8223 op1 = gen_label_rtx ();
8224 jumpifnot (TREE_OPERAND (exp, 0), op0);
8226 start_cleanup_deferral ();
8228 /* One branch of the cond can be void if it never returns, for
8229 example A ? throw : E. */
8230 if (temp != 0
8231 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8232 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8233 else
8234 expand_expr (TREE_OPERAND (exp, 1),
8235 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8236 end_cleanup_deferral ();
8237 emit_queue ();
8238 emit_jump_insn (gen_jump (op1));
8239 emit_barrier ();
8240 emit_label (op0);
8241 start_cleanup_deferral ();
8242 if (temp != 0
8243 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8244 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8245 else
8246 expand_expr (TREE_OPERAND (exp, 2),
8247 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8250 end_cleanup_deferral ();
8252 emit_queue ();
8253 emit_label (op1);
8254 OK_DEFER_POP;
8256 return temp;
8259 case TARGET_EXPR:
8261 /* Something needs to be initialized, but we didn't know
8262 where that thing was when building the tree. For example,
8263 it could be the return value of a function, or a parameter
8264 to a function which is laid out on the stack, or a temporary
8265 variable which must be passed by reference.
8267 We guarantee that the expression will either be constructed
8268 or copied into our original target. */
8270 tree slot = TREE_OPERAND (exp, 0);
8271 tree cleanups = NULL_TREE;
8272 tree exp1;
8274 if (TREE_CODE (slot) != VAR_DECL)
8275 abort ();
8277 if (! ignore)
8278 target = original_target;
8280 /* Set this here so that if we get a target that refers to a
8281 register variable that's already been used, put_reg_into_stack
8282 knows that it should fix up those uses. */
8283 TREE_USED (slot) = 1;
8285 if (target == 0)
8287 if (DECL_RTL_SET_P (slot))
8289 target = DECL_RTL (slot);
8290 /* If we have already expanded the slot, don't do
8291 it again. (mrs) */
8292 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8293 return target;
8295 else
8297 target = assign_temp (type, 2, 0, 1);
8298 /* All temp slots at this level must not conflict. */
8299 preserve_temp_slots (target);
8300 SET_DECL_RTL (slot, target);
8301 if (TREE_ADDRESSABLE (slot))
8302 put_var_into_stack (slot);
8304 /* Since SLOT is not known to the called function
8305 to belong to its stack frame, we must build an explicit
8306 cleanup. This case occurs when we must build up a reference
8307 to pass the reference as an argument. In this case,
8308 it is very likely that such a reference need not be
8309 built here. */
8311 if (TREE_OPERAND (exp, 2) == 0)
8312 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8313 cleanups = TREE_OPERAND (exp, 2);
8316 else
8318 /* This case does occur when expanding a parameter which
8319 needs to be constructed on the stack. The target
8320 is the actual stack address that we want to initialize.
8321 The function we call will perform the cleanup in this case. */
8323 /* If we have already assigned it space, use that space,
8324 not the target that we were passed in, as our target
8325 parameter is only a hint. */
8326 if (DECL_RTL_SET_P (slot))
8328 target = DECL_RTL (slot);
8329 /* If we have already expanded the slot, don't do
8330 it again. (mrs) */
8331 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8332 return target;
8334 else
8336 SET_DECL_RTL (slot, target);
8337 /* If we must have an addressable slot, then make sure that
8338 the RTL that we just stored in slot is OK. */
8339 if (TREE_ADDRESSABLE (slot))
8340 put_var_into_stack (slot);
8344 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8345 /* Mark it as expanded. */
8346 TREE_OPERAND (exp, 1) = NULL_TREE;
8348 store_expr (exp1, target, 0);
8350 expand_decl_cleanup (NULL_TREE, cleanups);
8352 return target;
8355 case INIT_EXPR:
8357 tree lhs = TREE_OPERAND (exp, 0);
8358 tree rhs = TREE_OPERAND (exp, 1);
8360 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8361 return temp;
8364 case MODIFY_EXPR:
8366 /* If lhs is complex, expand calls in rhs before computing it.
8367 That's so we don't compute a pointer and save it over a
8368 call. If lhs is simple, compute it first so we can give it
8369 as a target if the rhs is just a call. This avoids an
8370 extra temp and copy and that prevents a partial-subsumption
8371 which makes bad code. Actually we could treat
8372 component_ref's of vars like vars. */
8374 tree lhs = TREE_OPERAND (exp, 0);
8375 tree rhs = TREE_OPERAND (exp, 1);
8377 temp = 0;
8379 /* Check for |= or &= of a bitfield of size one into another bitfield
8380 of size 1. In this case, (unless we need the result of the
8381 assignment) we can do this more efficiently with a
8382 test followed by an assignment, if necessary.
8384 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8385 things change so we do, this code should be enhanced to
8386 support it. */
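	 /* Illustration: with two 1-bit bitfields, s.a |= s.b; is emitted
	    below as the equivalent of  if (s.b) s.a = 1;  and likewise
	    s.a &= s.b; becomes  if (! s.b) s.a = 0;  */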
8387 if (ignore
8388 && TREE_CODE (lhs) == COMPONENT_REF
8389 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8390 || TREE_CODE (rhs) == BIT_AND_EXPR)
8391 && TREE_OPERAND (rhs, 0) == lhs
8392 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8393 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8394 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8396 rtx label = gen_label_rtx ();
8398 do_jump (TREE_OPERAND (rhs, 1),
8399 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8400 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8401 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8402 (TREE_CODE (rhs) == BIT_IOR_EXPR
8403 ? integer_one_node
8404 : integer_zero_node)),
8405 0, 0);
8406 do_pending_stack_adjust ();
8407 emit_label (label);
8408 return const0_rtx;
8411 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8413 return temp;
8416 case RETURN_EXPR:
8417 if (!TREE_OPERAND (exp, 0))
8418 expand_null_return ();
8419 else
8420 expand_return (TREE_OPERAND (exp, 0));
8421 return const0_rtx;
8423 case PREINCREMENT_EXPR:
8424 case PREDECREMENT_EXPR:
8425 return expand_increment (exp, 0, ignore);
8427 case POSTINCREMENT_EXPR:
8428 case POSTDECREMENT_EXPR:
8429 /* Faster to treat as pre-increment if result is not used. */
8430 return expand_increment (exp, ! ignore, ignore);
8432 case ADDR_EXPR:
8433 /* Are we taking the address of a nested function? */
8434 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8435 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8436 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8437 && ! TREE_STATIC (exp))
8439 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8440 op0 = force_operand (op0, target);
8442 /* If we are taking the address of something erroneous, just
8443 return a zero. */
8444 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8445 return const0_rtx;
8446 /* If we are taking the address of a constant and are at the
8447 top level, we have to use output_constant_def since we can't
8448 call force_const_mem at top level. */
8449 else if (cfun == 0
8450 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8451 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8452 == 'c')))
8453 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8454 else
8456 /* We make sure to pass const0_rtx down if we came in with
8457 ignore set, to avoid doing the cleanups twice for something. */
8458 op0 = expand_expr (TREE_OPERAND (exp, 0),
8459 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8460 (modifier == EXPAND_INITIALIZER
8461 ? modifier : EXPAND_CONST_ADDRESS));
8463 /* If we are going to ignore the result, OP0 will have been set
8464 to const0_rtx, so just return it. Don't get confused and
8465 think we are taking the address of the constant. */
8466 if (ignore)
8467 return op0;
8469 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8470 clever and return a REG when given a MEM. */
8471 op0 = protect_from_queue (op0, 1);
8473 /* We would like the object in memory. If it is a constant, we can
8474 have it be statically allocated into memory. For a non-constant,
8475 we need to allocate some memory and store the value into it. */
8477 if (CONSTANT_P (op0))
8478 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8479 op0);
8480 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8481 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8482 || GET_CODE (op0) == PARALLEL)
8484 /* If the operand is a SAVE_EXPR, we can deal with this by
8485 forcing the SAVE_EXPR into memory. */
8486 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8488 put_var_into_stack (TREE_OPERAND (exp, 0));
8489 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8491 else
8493 /* If this object is in a register, it can't be BLKmode. */
8494 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8495 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8497 if (GET_CODE (op0) == PARALLEL)
8498 /* Handle calls that pass values in multiple
8499 non-contiguous locations. The Irix 6 ABI has examples
8500 of this. */
8501 emit_group_store (memloc, op0,
8502 int_size_in_bytes (inner_type));
8503 else
8504 emit_move_insn (memloc, op0);
8506 op0 = memloc;
8510 if (GET_CODE (op0) != MEM)
8511 abort ();
8513 mark_temp_addr_taken (op0);
8514 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8516 op0 = XEXP (op0, 0);
8517 #ifdef POINTERS_EXTEND_UNSIGNED
8518 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8519 && mode == ptr_mode)
8520 op0 = convert_memory_address (ptr_mode, op0);
8521 #endif
8522 return op0;
8525 /* If OP0 is not aligned at least as much as the type requires, we
8526 need to make a temporary, copy OP0 to it, and take the address of
8527 the temporary. We want to use the alignment of the type, not of
8528 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8529 the test for BLKmode means that can't happen. The test for
8530 BLKmode is because we never make mis-aligned MEMs with
8531 non-BLKmode.
8533 We don't need to do this at all if the machine doesn't have
8534 strict alignment. */
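	 /* A typical case is taking the address of a BLKmode field of a
	    packed structure whose placement is less aligned than its type
	    requires: the field is copied into a suitably aligned temporary
	    and the temporary's address is used instead.  */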
8535 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8536 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8537 > MEM_ALIGN (op0))
8538 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8540 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8541 rtx new
8542 = assign_stack_temp_for_type
8543 (TYPE_MODE (inner_type),
8544 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8545 : int_size_in_bytes (inner_type),
8546 1, build_qualified_type (inner_type,
8547 (TYPE_QUALS (inner_type)
8548 | TYPE_QUAL_CONST)));
8550 if (TYPE_ALIGN_OK (inner_type))
8551 abort ();
8553 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8554 op0 = new;
8557 op0 = force_operand (XEXP (op0, 0), target);
8560 if (flag_force_addr
8561 && GET_CODE (op0) != REG
8562 && modifier != EXPAND_CONST_ADDRESS
8563 && modifier != EXPAND_INITIALIZER
8564 && modifier != EXPAND_SUM)
8565 op0 = force_reg (Pmode, op0);
8567 if (GET_CODE (op0) == REG
8568 && ! REG_USERVAR_P (op0))
8569 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8571 #ifdef POINTERS_EXTEND_UNSIGNED
8572 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8573 && mode == ptr_mode)
8574 op0 = convert_memory_address (ptr_mode, op0);
8575 #endif
8577 return op0;
8579 case ENTRY_VALUE_EXPR:
8580 abort ();
8582 /* COMPLEX type for Extended Pascal & Fortran */
8583 case COMPLEX_EXPR:
8585 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8586 rtx insns;
8588 /* Get the rtx for the operands. */
8589 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8590 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8592 if (! target)
8593 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8595 start_sequence ();
8597 /* Move the real (op0) and imaginary (op1) parts to their location. */
8598 emit_move_insn (gen_realpart (mode, target), op0);
8599 emit_move_insn (gen_imagpart (mode, target), op1);
8601 insns = get_insns ();
8602 end_sequence ();
8604 /* Complex construction should appear as a single unit. */
8605 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8606 each with a separate pseudo as destination.
8607 It's not correct for flow to treat them as a unit. */
8608 if (GET_CODE (target) != CONCAT)
8609 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8610 else
8611 emit_insns (insns);
8613 return target;
8616 case REALPART_EXPR:
8617 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8618 return gen_realpart (mode, op0);
8620 case IMAGPART_EXPR:
8621 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8622 return gen_imagpart (mode, op0);
8624 case CONJ_EXPR:
8626 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8627 rtx imag_t;
8628 rtx insns;
8630 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8632 if (! target)
8633 target = gen_reg_rtx (mode);
8635 start_sequence ();
8637 /* Store the realpart and the negated imagpart to target. */
8638 emit_move_insn (gen_realpart (partmode, target),
8639 gen_realpart (partmode, op0));
8641 imag_t = gen_imagpart (partmode, target);
8642 temp = expand_unop (partmode,
8643 ! unsignedp && flag_trapv
8644 && (GET_MODE_CLASS(partmode) == MODE_INT)
8645 ? negv_optab : neg_optab,
8646 gen_imagpart (partmode, op0), imag_t, 0);
8647 if (temp != imag_t)
8648 emit_move_insn (imag_t, temp);
8650 insns = get_insns ();
8651 end_sequence ();
8653 /* Conjugate should appear as a single unit.
8654 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8655 each with a separate pseudo as destination.
8656 It's not correct for flow to treat them as a unit. */
8657 if (GET_CODE (target) != CONCAT)
8658 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8659 else
8660 emit_insns (insns);
8662 return target;
8665 case TRY_CATCH_EXPR:
8667 tree handler = TREE_OPERAND (exp, 1);
8669 expand_eh_region_start ();
8671 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8673 expand_eh_region_end_cleanup (handler);
8675 return op0;
8678 case TRY_FINALLY_EXPR:
8680 tree try_block = TREE_OPERAND (exp, 0);
8681 tree finally_block = TREE_OPERAND (exp, 1);
8682 rtx finally_label = gen_label_rtx ();
8683 rtx done_label = gen_label_rtx ();
8684 rtx return_link = gen_reg_rtx (Pmode);
8685 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8686 (tree) finally_label, (tree) return_link);
8687 TREE_SIDE_EFFECTS (cleanup) = 1;
8689 /* Start a new binding layer that will keep track of all cleanup
8690 actions to be performed. */
8691 expand_start_bindings (2);
8693 target_temp_slot_level = temp_slot_level;
8695 expand_decl_cleanup (NULL_TREE, cleanup);
8696 op0 = expand_expr (try_block, target, tmode, modifier);
8698 preserve_temp_slots (op0);
8699 expand_end_bindings (NULL_TREE, 0, 0);
8700 emit_jump (done_label);
8701 emit_label (finally_label);
8702 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8703 emit_indirect_jump (return_link);
8704 emit_label (done_label);
8705 return op0;
8708 case GOTO_SUBROUTINE_EXPR:
8710 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8711 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8712 rtx return_address = gen_label_rtx ();
8713 emit_move_insn (return_link,
8714 gen_rtx_LABEL_REF (Pmode, return_address));
8715 emit_jump (subr);
8716 emit_label (return_address);
8717 return const0_rtx;
8720 case VA_ARG_EXPR:
8721 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8723 case EXC_PTR_EXPR:
8724 return get_exception_pointer (cfun);
8726 case FDESC_EXPR:
8727 /* Function descriptors are not valid except as
8728 initialization constants, and should not be expanded. */
8729 abort ();
8731 default:
8732 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8735 /* Here to do an ordinary binary operator, generating an instruction
8736 from the optab already placed in `this_optab'. */
8737 binop:
8738 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8739 subtarget = 0;
8740 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8741 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8742 binop2:
8743 temp = expand_binop (mode, this_optab, op0, op1, target,
8744 unsignedp, OPTAB_LIB_WIDEN);
8745 if (temp == 0)
8746 abort ();
8747 return temp;
8750 /* Return the tree node if ARG corresponds to a string constant, or zero
8751 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8752 in bytes within the string that ARG is accessing. The type of the
8753 offset will be `sizetype'. */
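/* For example, given the argument &"hello"[3] -- which typically reaches
   here as a PLUS_EXPR of an ADDR_EXPR of the STRING_CST and the constant
   3 -- the STRING_CST for "hello" is returned and *PTR_OFFSET is set
   to 3.  */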
8755 tree
8756 string_constant (arg, ptr_offset)
8757 tree arg;
8758 tree *ptr_offset;
8760 STRIP_NOPS (arg);
8762 if (TREE_CODE (arg) == ADDR_EXPR
8763 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8765 *ptr_offset = size_zero_node;
8766 return TREE_OPERAND (arg, 0);
8768 else if (TREE_CODE (arg) == PLUS_EXPR)
8770 tree arg0 = TREE_OPERAND (arg, 0);
8771 tree arg1 = TREE_OPERAND (arg, 1);
8773 STRIP_NOPS (arg0);
8774 STRIP_NOPS (arg1);
8776 if (TREE_CODE (arg0) == ADDR_EXPR
8777 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8779 *ptr_offset = convert (sizetype, arg1);
8780 return TREE_OPERAND (arg0, 0);
8782 else if (TREE_CODE (arg1) == ADDR_EXPR
8783 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8785 *ptr_offset = convert (sizetype, arg0);
8786 return TREE_OPERAND (arg1, 0);
8790 return 0;
8793 /* Expand code for a post- or pre- increment or decrement
8794 and return the RTX for the result.
8795 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8797 static rtx
8798 expand_increment (exp, post, ignore)
8799 tree exp;
8800 int post, ignore;
8802 rtx op0, op1;
8803 rtx temp, value;
8804 tree incremented = TREE_OPERAND (exp, 0);
8805 optab this_optab = add_optab;
8806 int icode;
8807 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8808 int op0_is_copy = 0;
8809 int single_insn = 0;
8810 /* 1 means we can't store into OP0 directly,
8811 because it is a subreg narrower than a word,
8812 and we don't dare clobber the rest of the word. */
8813 int bad_subreg = 0;
8815 /* Stabilize any component ref that might need to be
8816 evaluated more than once below. */
8817 if (!post
8818 || TREE_CODE (incremented) == BIT_FIELD_REF
8819 || (TREE_CODE (incremented) == COMPONENT_REF
8820 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8821 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8822 incremented = stabilize_reference (incremented);
8823 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8824 ones into save exprs so that they don't accidentally get evaluated
8825 more than once by the code below. */
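  /* (For instance, ++ ++i is valid C++, since the built-in pre-increment
     yields an lvalue there, so one PREINCREMENT_EXPR appears directly as
     the operand of another.)  */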
8826 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8827 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8828 incremented = save_expr (incremented);
8830 /* Compute the operands as RTX.
8831 Note whether OP0 is the actual lvalue or a copy of it:
8832 I believe it is a copy iff it is a register or subreg
8833 and insns were generated in computing it. */
8835 temp = get_last_insn ();
8836 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8838 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8839 in place but instead must do sign- or zero-extension during assignment,
8840 so we copy it into a new register and let the code below use it as
8841 a copy.
8843 Note that we can safely modify this SUBREG since it is known not to be
8844 shared (it was made by the expand_expr call above). */
8846 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8848 if (post)
8849 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8850 else
8851 bad_subreg = 1;
8853 else if (GET_CODE (op0) == SUBREG
8854 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8856 /* We cannot increment this SUBREG in place. If we are
8857 post-incrementing, get a copy of the old value. Otherwise,
8858 just mark that we cannot increment in place. */
8859 if (post)
8860 op0 = copy_to_reg (op0);
8861 else
8862 bad_subreg = 1;
8865 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8866 && temp != get_last_insn ());
8867 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8869 /* Decide whether incrementing or decrementing. */
8870 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8871 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8872 this_optab = sub_optab;
8874 /* Convert decrement by a constant into a negative increment. */
8875 if (this_optab == sub_optab
8876 && GET_CODE (op1) == CONST_INT)
8878 op1 = GEN_INT (-INTVAL (op1));
8879 this_optab = add_optab;
8882 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
8883 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
8885 /* For a preincrement, see if we can do this with a single instruction. */
8886 if (!post)
8888 icode = (int) this_optab->handlers[(int) mode].insn_code;
8889 if (icode != (int) CODE_FOR_nothing
8890 /* Make sure that OP0 is valid for operands 0 and 1
8891 of the insn we want to queue. */
8892 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8893 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8894 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8895 single_insn = 1;
8898 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8899 then we cannot just increment OP0. We must therefore contrive to
8900 increment the original value. Then, for postincrement, we can return
8901 OP0 since it is a copy of the old value. For preincrement, expand here
8902 unless we can do it with a single insn.
8904 Likewise if storing directly into OP0 would clobber high bits
8905 we need to preserve (bad_subreg). */
8906 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8908 /* This is the easiest way to increment the value wherever it is.
8909 Problems with multiple evaluation of INCREMENTED are prevented
8910 because either (1) it is a component_ref or preincrement,
8911 in which case it was stabilized above, or (2) it is an array_ref
8912 with constant index in an array in a register, which is
8913 safe to reevaluate. */
8914 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8915 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8916 ? MINUS_EXPR : PLUS_EXPR),
8917 TREE_TYPE (exp),
8918 incremented,
8919 TREE_OPERAND (exp, 1));
8921 while (TREE_CODE (incremented) == NOP_EXPR
8922 || TREE_CODE (incremented) == CONVERT_EXPR)
8924 newexp = convert (TREE_TYPE (incremented), newexp);
8925 incremented = TREE_OPERAND (incremented, 0);
8928 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8929 return post ? op0 : temp;
8932 if (post)
8934 /* We have a true reference to the value in OP0.
8935 If there is an insn to add or subtract in this mode, queue it.
8936 Queueing the increment insn avoids the register shuffling
8937 that often results if we must increment now and first save
8938 the old value for subsequent use. */
8940 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8941 op0 = stabilize (op0);
8942 #endif
8944 icode = (int) this_optab->handlers[(int) mode].insn_code;
8945 if (icode != (int) CODE_FOR_nothing
8946 /* Make sure that OP0 is valid for operands 0 and 1
8947 of the insn we want to queue. */
8948 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8949 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8951 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8952 op1 = force_reg (mode, op1);
8954 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8956 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8958 rtx addr = (general_operand (XEXP (op0, 0), mode)
8959 ? force_reg (Pmode, XEXP (op0, 0))
8960 : copy_to_reg (XEXP (op0, 0)));
8961 rtx temp, result;
8963 op0 = replace_equiv_address (op0, addr);
8964 temp = force_reg (GET_MODE (op0), op0);
8965 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8966 op1 = force_reg (mode, op1);
8968 /* The increment queue is LIFO, thus we have to `queue'
8969 the instructions in reverse order. */
8970 enqueue_insn (op0, gen_move_insn (op0, temp));
8971 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8972 return result;
8976 /* Preincrement, or we can't increment with one simple insn. */
8977 if (post)
8978 /* Save a copy of the value before inc or dec, to return it later. */
8979 temp = value = copy_to_reg (op0);
8980 else
8981 /* Arrange to return the incremented value. */
8982 /* Copy the rtx because expand_binop will protect from the queue,
8983 and the results of that would be invalid for us to return
8984 if our caller does emit_queue before using our result. */
8985 temp = copy_rtx (value = op0);
8987 /* Increment however we can. */
8988 op1 = expand_binop (mode, this_optab, value, op1, op0,
8989 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8991 /* Make sure the value is stored into OP0. */
8992 if (op1 != op0)
8993 emit_move_insn (op0, op1);
8995 return temp;
8998 /* At the start of a function, record that we have no previously-pushed
8999 arguments waiting to be popped. */
9001 void
9002 init_pending_stack_adjust ()
9004 pending_stack_adjust = 0;
9007 /* When exiting from a function, if safe, clear out any pending stack adjust
9008 so the adjustment won't get done.
9010 Note, if the current function calls alloca, then it must have a
9011 frame pointer regardless of the value of flag_omit_frame_pointer. */
9013 void
9014 clear_pending_stack_adjust ()
9016 #ifdef EXIT_IGNORE_STACK
9017 if (optimize > 0
9018 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9019 && EXIT_IGNORE_STACK
9020 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9021 && ! flag_inline_functions)
9023 stack_pointer_delta -= pending_stack_adjust,
9024 pending_stack_adjust = 0;
9026 #endif
9029 /* Pop any previously-pushed arguments that have not been popped yet. */
9031 void
9032 do_pending_stack_adjust ()
9034 if (inhibit_defer_pop == 0)
9036 if (pending_stack_adjust != 0)
9037 adjust_stack (GEN_INT (pending_stack_adjust));
9038 pending_stack_adjust = 0;
9042 /* Expand conditional expressions. */
9044 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9045 LABEL is an rtx of code CODE_LABEL, in this function and all the
9046 functions here. */
9048 void
9049 jumpifnot (exp, label)
9050 tree exp;
9051 rtx label;
9053 do_jump (exp, label, NULL_RTX);
9056 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9058 void
9059 jumpif (exp, label)
9060 tree exp;
9061 rtx label;
9063 do_jump (exp, NULL_RTX, label);
9066 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9067 the result is zero, or IF_TRUE_LABEL if the result is one.
9068 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9069 meaning fall through in that case.
9071 do_jump always does any pending stack adjust except when it does not
9072 actually perform a jump. An example where there is no jump
9073 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9075 This function is responsible for optimizing cases such as
9076 &&, || and comparison operators in EXP. */
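/* For example, for `if (a && b) f ();' the front end builds a
TRUTH_ANDIF_EXPR; do_jump first tests `a' and jumps to the false label
when it is zero, and only then emits the test of `b', so `b' is never
evaluated once `a' has been found false.  */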
9078 void
9079 do_jump (exp, if_false_label, if_true_label)
9080 tree exp;
9081 rtx if_false_label, if_true_label;
9083 enum tree_code code = TREE_CODE (exp);
9084 /* Some cases need to create a label to jump to
9085 in order to properly fall through.
9086 These cases set DROP_THROUGH_LABEL nonzero. */
9087 rtx drop_through_label = 0;
9088 rtx temp;
9089 int i;
9090 tree type;
9091 enum machine_mode mode;
9093 #ifdef MAX_INTEGER_COMPUTATION_MODE
9094 check_max_integer_computation_mode (exp);
9095 #endif
9097 emit_queue ();
9099 switch (code)
9101 case ERROR_MARK:
9102 break;
9104 case INTEGER_CST:
9105 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9106 if (temp)
9107 emit_jump (temp);
9108 break;
9110 #if 0
9111 /* This is not true with #pragma weak.  */

9112 case ADDR_EXPR:
9113 /* The address of something can never be zero. */
9114 if (if_true_label)
9115 emit_jump (if_true_label);
9116 break;
9117 #endif
9119 case NOP_EXPR:
9120 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9121 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9122 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9123 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9124 goto normal;
9125 case CONVERT_EXPR:
9126 /* If we are narrowing the operand, we have to do the compare in the
9127 narrower mode. */
9128 if ((TYPE_PRECISION (TREE_TYPE (exp))
9129 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9130 goto normal;
9131 case NON_LVALUE_EXPR:
9132 case REFERENCE_EXPR:
9133 case ABS_EXPR:
9134 case NEGATE_EXPR:
9135 case LROTATE_EXPR:
9136 case RROTATE_EXPR:
9137 /* These cannot change zero->non-zero or vice versa. */
9138 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9139 break;
9141 case WITH_RECORD_EXPR:
9142 /* Put the object on the placeholder list, recurse through our first
9143 operand, and pop the list. */
9144 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9145 placeholder_list);
9146 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9147 placeholder_list = TREE_CHAIN (placeholder_list);
9148 break;
9150 #if 0
9151 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9152 a test, and can take more if the test is eliminated.  */
9153 case PLUS_EXPR:
9154 /* Reduce to minus. */
9155 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9156 TREE_OPERAND (exp, 0),
9157 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9158 TREE_OPERAND (exp, 1))));
9159 /* Process as MINUS. */
9160 #endif
9162 case MINUS_EXPR:
9163 /* Nonzero iff the operands of the minus differ.  */
9164 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9165 TREE_OPERAND (exp, 0),
9166 TREE_OPERAND (exp, 1)),
9167 NE, NE, if_false_label, if_true_label);
9168 break;
9170 case BIT_AND_EXPR:
9171 /* If we are AND'ing with a small constant, do this comparison in the
9172 smallest type that fits. If the machine doesn't have comparisons
9173 that small, it will be converted back to the wider comparison.
9174 This helps if we are testing the sign bit of a narrower object.
9175 combine can't do this for us because it can't know whether a
9176 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
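/* For example, a jump on `x & 0x80', with `x' a 32-bit int, depends
only on bit 7, so tree_floor_log2 gives 7, an 8-bit integer mode fits,
and the test is performed as a QImode comparison of
`(unsigned char) (x & 0x80)' against zero.  */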
9178 if (! SLOW_BYTE_ACCESS
9179 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9180 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9181 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9182 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9183 && (type = type_for_mode (mode, 1)) != 0
9184 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9185 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9186 != CODE_FOR_nothing))
9188 do_jump (convert (type, exp), if_false_label, if_true_label);
9189 break;
9191 goto normal;
9193 case TRUTH_NOT_EXPR:
9194 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9195 break;
9197 case TRUTH_ANDIF_EXPR:
9198 if (if_false_label == 0)
9199 if_false_label = drop_through_label = gen_label_rtx ();
9200 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9201 start_cleanup_deferral ();
9202 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9203 end_cleanup_deferral ();
9204 break;
9206 case TRUTH_ORIF_EXPR:
9207 if (if_true_label == 0)
9208 if_true_label = drop_through_label = gen_label_rtx ();
9209 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9210 start_cleanup_deferral ();
9211 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9212 end_cleanup_deferral ();
9213 break;
9215 case COMPOUND_EXPR:
9216 push_temp_slots ();
9217 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9218 preserve_temp_slots (NULL_RTX);
9219 free_temp_slots ();
9220 pop_temp_slots ();
9221 emit_queue ();
9222 do_pending_stack_adjust ();
9223 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9224 break;
9226 case COMPONENT_REF:
9227 case BIT_FIELD_REF:
9228 case ARRAY_REF:
9229 case ARRAY_RANGE_REF:
9231 HOST_WIDE_INT bitsize, bitpos;
9232 int unsignedp;
9233 enum machine_mode mode;
9234 tree type;
9235 tree offset;
9236 int volatilep = 0;
9238 /* Get description of this reference. We don't actually care
9239 about the underlying object here. */
9240 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9241 &unsignedp, &volatilep);
9243 type = type_for_size (bitsize, unsignedp);
9244 if (! SLOW_BYTE_ACCESS
9245 && type != 0 && bitsize >= 0
9246 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9247 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9248 != CODE_FOR_nothing))
9250 do_jump (convert (type, exp), if_false_label, if_true_label);
9251 break;
9253 goto normal;
9256 case COND_EXPR:
9257 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9258 if (integer_onep (TREE_OPERAND (exp, 1))
9259 && integer_zerop (TREE_OPERAND (exp, 2)))
9260 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9262 else if (integer_zerop (TREE_OPERAND (exp, 1))
9263 && integer_onep (TREE_OPERAND (exp, 2)))
9264 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9266 else
9268 rtx label1 = gen_label_rtx ();
9269 drop_through_label = gen_label_rtx ();
9271 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9273 start_cleanup_deferral ();
9274 /* Now the THEN-expression. */
9275 do_jump (TREE_OPERAND (exp, 1),
9276 if_false_label ? if_false_label : drop_through_label,
9277 if_true_label ? if_true_label : drop_through_label);
9278 /* In case the do_jump just above never jumps. */
9279 do_pending_stack_adjust ();
9280 emit_label (label1);
9282 /* Now the ELSE-expression. */
9283 do_jump (TREE_OPERAND (exp, 2),
9284 if_false_label ? if_false_label : drop_through_label,
9285 if_true_label ? if_true_label : drop_through_label);
9286 end_cleanup_deferral ();
9288 break;
9290 case EQ_EXPR:
9292 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9294 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9295 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9297 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9298 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9299 do_jump
9300 (fold
9301 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9302 fold (build (EQ_EXPR, TREE_TYPE (exp),
9303 fold (build1 (REALPART_EXPR,
9304 TREE_TYPE (inner_type),
9305 exp0)),
9306 fold (build1 (REALPART_EXPR,
9307 TREE_TYPE (inner_type),
9308 exp1)))),
9309 fold (build (EQ_EXPR, TREE_TYPE (exp),
9310 fold (build1 (IMAGPART_EXPR,
9311 TREE_TYPE (inner_type),
9312 exp0)),
9313 fold (build1 (IMAGPART_EXPR,
9314 TREE_TYPE (inner_type),
9315 exp1)))))),
9316 if_false_label, if_true_label);
9319 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9320 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9322 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9323 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9324 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9325 else
9326 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9327 break;
9330 case NE_EXPR:
9332 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9334 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9335 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9337 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9338 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9339 do_jump
9340 (fold
9341 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9342 fold (build (NE_EXPR, TREE_TYPE (exp),
9343 fold (build1 (REALPART_EXPR,
9344 TREE_TYPE (inner_type),
9345 exp0)),
9346 fold (build1 (REALPART_EXPR,
9347 TREE_TYPE (inner_type),
9348 exp1)))),
9349 fold (build (NE_EXPR, TREE_TYPE (exp),
9350 fold (build1 (IMAGPART_EXPR,
9351 TREE_TYPE (inner_type),
9352 exp0)),
9353 fold (build1 (IMAGPART_EXPR,
9354 TREE_TYPE (inner_type),
9355 exp1)))))),
9356 if_false_label, if_true_label);
9359 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9360 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9362 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9363 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9364 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9365 else
9366 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9367 break;
9370 case LT_EXPR:
9371 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9372 if (GET_MODE_CLASS (mode) == MODE_INT
9373 && ! can_compare_p (LT, mode, ccp_jump))
9374 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9375 else
9376 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9377 break;
9379 case LE_EXPR:
9380 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9381 if (GET_MODE_CLASS (mode) == MODE_INT
9382 && ! can_compare_p (LE, mode, ccp_jump))
9383 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9384 else
9385 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9386 break;
9388 case GT_EXPR:
9389 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9390 if (GET_MODE_CLASS (mode) == MODE_INT
9391 && ! can_compare_p (GT, mode, ccp_jump))
9392 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9393 else
9394 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9395 break;
9397 case GE_EXPR:
9398 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9399 if (GET_MODE_CLASS (mode) == MODE_INT
9400 && ! can_compare_p (GE, mode, ccp_jump))
9401 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9402 else
9403 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9404 break;
9406 case UNORDERED_EXPR:
9407 case ORDERED_EXPR:
9409 enum rtx_code cmp, rcmp;
9410 int do_rev;
9412 if (code == UNORDERED_EXPR)
9413 cmp = UNORDERED, rcmp = ORDERED;
9414 else
9415 cmp = ORDERED, rcmp = UNORDERED;
9416 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9418 do_rev = 0;
9419 if (! can_compare_p (cmp, mode, ccp_jump)
9420 && (can_compare_p (rcmp, mode, ccp_jump)
9421 /* If the target doesn't provide either UNORDERED or ORDERED
9422 comparisons, canonicalize on UNORDERED for the library. */
9423 || rcmp == UNORDERED))
9424 do_rev = 1;
9426 if (! do_rev)
9427 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9428 else
9429 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9431 break;
9434 enum rtx_code rcode1;
9435 enum tree_code tcode2;
9437 case UNLT_EXPR:
9438 rcode1 = UNLT;
9439 tcode2 = LT_EXPR;
9440 goto unordered_bcc;
9441 case UNLE_EXPR:
9442 rcode1 = UNLE;
9443 tcode2 = LE_EXPR;
9444 goto unordered_bcc;
9445 case UNGT_EXPR:
9446 rcode1 = UNGT;
9447 tcode2 = GT_EXPR;
9448 goto unordered_bcc;
9449 case UNGE_EXPR:
9450 rcode1 = UNGE;
9451 tcode2 = GE_EXPR;
9452 goto unordered_bcc;
9453 case UNEQ_EXPR:
9454 rcode1 = UNEQ;
9455 tcode2 = EQ_EXPR;
9456 goto unordered_bcc;
9458 unordered_bcc:
9459 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9460 if (can_compare_p (rcode1, mode, ccp_jump))
9461 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9462 if_true_label);
9463 else
9465 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9466 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9467 tree cmp0, cmp1;
9469 /* If the target doesn't support combined unordered
9470 compares, decompose into UNORDERED + comparison. */
9471 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9472 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9473 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9474 do_jump (exp, if_false_label, if_true_label);
9477 break;
9479 /* Special case:
9480 __builtin_expect (<test>, 0) and
9481 __builtin_expect (<test>, 1)
9483 We need to do this here, so that <test> is not converted to a SCC
9484 operation on machines that use condition code registers and COMPARE
9485 like the PowerPC, and then the jump is done based on whether the SCC
9486 operation produced a 1 or 0. */
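/* For example, for `if (__builtin_expect (x > 0, 1))' we let
expand_builtin_expect_jump emit the conditional jump on `x > 0'
directly, instead of first materializing a 0/1 value for the comparison
and then jumping on that value.  */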
9487 case CALL_EXPR:
9488 /* Check for a built-in function. */
9489 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9491 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9492 tree arglist = TREE_OPERAND (exp, 1);
9494 if (TREE_CODE (fndecl) == FUNCTION_DECL
9495 && DECL_BUILT_IN (fndecl)
9496 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9497 && arglist != NULL_TREE
9498 && TREE_CHAIN (arglist) != NULL_TREE)
9500 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9501 if_true_label);
9503 if (seq != NULL_RTX)
9505 emit_insn (seq);
9506 return;
9510 /* fall through and generate the normal code. */
9512 default:
9513 normal:
9514 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9515 #if 0
9516 /* This is not needed any more and causes poor code since it causes
9517 comparisons and tests from non-SI objects to have different code
9518 sequences. */
9519 /* Copy to register to avoid generating bad insns by cse
9520 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9521 if (!cse_not_expected && GET_CODE (temp) == MEM)
9522 temp = copy_to_reg (temp);
9523 #endif
9524 do_pending_stack_adjust ();
9525 /* Do any postincrements in the expression that was tested. */
9526 emit_queue ();
9528 if (GET_CODE (temp) == CONST_INT
9529 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9530 || GET_CODE (temp) == LABEL_REF)
9532 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9533 if (target)
9534 emit_jump (target);
9536 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9537 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9538 /* Note swapping the labels gives us not-equal. */
9539 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9540 else if (GET_MODE (temp) != VOIDmode)
9541 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9542 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9543 GET_MODE (temp), NULL_RTX,
9544 if_false_label, if_true_label);
9545 else
9546 abort ();
9549 if (drop_through_label)
9551 /* If do_jump produces code that might be jumped around,
9552 do any stack adjusts from that code, before the place
9553 where control merges in. */
9554 do_pending_stack_adjust ();
9555 emit_label (drop_through_label);
9559 /* Given a comparison expression EXP for values too wide to be compared
9560 with one insn, test the comparison and jump to the appropriate label.
9561 The code of EXP is ignored; we always test GT if SWAP is 0,
9562 and LT if SWAP is 1. */
9564 static void
9565 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9566 tree exp;
9567 int swap;
9568 rtx if_false_label, if_true_label;
9570 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9571 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9572 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9573 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9575 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9578 /* Compare OP0 with OP1, word at a time, in mode MODE.
9579 UNSIGNEDP says to do unsigned comparison.
9580 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
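/* For example, comparing two DImode values on a 32-bit word-size target
iterates twice: it branches to IF_TRUE_LABEL if the high-order words
compare greater, branches to IF_FALSE_LABEL if they differ (and hence
compare less), and only when they are equal goes on to the low-order
words, which are always compared unsigned.  */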
9582 void
9583 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9584 enum machine_mode mode;
9585 int unsignedp;
9586 rtx op0, op1;
9587 rtx if_false_label, if_true_label;
9589 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9590 rtx drop_through_label = 0;
9591 int i;
9593 if (! if_true_label || ! if_false_label)
9594 drop_through_label = gen_label_rtx ();
9595 if (! if_true_label)
9596 if_true_label = drop_through_label;
9597 if (! if_false_label)
9598 if_false_label = drop_through_label;
9600 /* Compare a word at a time, high order first. */
9601 for (i = 0; i < nwords; i++)
9603 rtx op0_word, op1_word;
9605 if (WORDS_BIG_ENDIAN)
9607 op0_word = operand_subword_force (op0, i, mode);
9608 op1_word = operand_subword_force (op1, i, mode);
9610 else
9612 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9613 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9616 /* All but the high-order word must be compared as unsigned.  */
9617 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9618 (unsignedp || i > 0), word_mode, NULL_RTX,
9619 NULL_RTX, if_true_label);
9621 /* Consider lower words only if these are equal. */
9622 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9623 NULL_RTX, NULL_RTX, if_false_label);
9626 if (if_false_label)
9627 emit_jump (if_false_label);
9628 if (drop_through_label)
9629 emit_label (drop_through_label);
9632 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9633 with one insn, test the comparison and jump to the appropriate label. */
9635 static void
9636 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9637 tree exp;
9638 rtx if_false_label, if_true_label;
9640 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9641 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9642 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9643 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9644 int i;
9645 rtx drop_through_label = 0;
9647 if (! if_false_label)
9648 drop_through_label = if_false_label = gen_label_rtx ();
9650 for (i = 0; i < nwords; i++)
9651 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9652 operand_subword_force (op1, i, mode),
9653 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9654 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9656 if (if_true_label)
9657 emit_jump (if_true_label);
9658 if (drop_through_label)
9659 emit_label (drop_through_label);
9662 /* Jump according to whether OP0 is 0.
9663 We assume that OP0 has an integer mode that is too wide
9664 for the available compare insns. */
9666 void
9667 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9668 rtx op0;
9669 rtx if_false_label, if_true_label;
9671 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9672 rtx part;
9673 int i;
9674 rtx drop_through_label = 0;
9676 /* The fastest way of doing this comparison on almost any machine is to
9677 "or" all the words and compare the result. If all have to be loaded
9678 from memory and this is a very wide item, it's possible this may
9679 be slower, but that's highly unlikely. */
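/* For example, for a DImode OP0 on a 32-bit word-size target this emits
a single IOR of the two words followed by one compare of the result
against zero, rather than a separate compare-and-branch per word.  */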
9681 part = gen_reg_rtx (word_mode);
9682 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9683 for (i = 1; i < nwords && part != 0; i++)
9684 part = expand_binop (word_mode, ior_optab, part,
9685 operand_subword_force (op0, i, GET_MODE (op0)),
9686 part, 1, OPTAB_WIDEN);
9688 if (part != 0)
9690 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9691 NULL_RTX, if_false_label, if_true_label);
9693 return;
9696 /* If we couldn't do the "or" simply, do this with a series of compares. */
9697 if (! if_false_label)
9698 drop_through_label = if_false_label = gen_label_rtx ();
9700 for (i = 0; i < nwords; i++)
9701 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9702 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9703 if_false_label, NULL_RTX);
9705 if (if_true_label)
9706 emit_jump (if_true_label);
9708 if (drop_through_label)
9709 emit_label (drop_through_label);
9712 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9713 (including any code needed to compute the values being compared)
9714 and set (CC0) according to the result.
9715 The decision as to signed or unsigned comparison must be made by the caller.
9717 We force a stack adjustment unless there are currently
9718 things pushed on the stack that aren't yet used.
9720 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9721 compared. */
9724 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9725 rtx op0, op1;
9726 enum rtx_code code;
9727 int unsignedp;
9728 enum machine_mode mode;
9729 rtx size;
9731 rtx tem;
9733 /* If one operand is constant, make it the second one. Only do this
9734 if the other operand is not constant as well. */
9736 if (swap_commutative_operands_p (op0, op1))
9738 tem = op0;
9739 op0 = op1;
9740 op1 = tem;
9741 code = swap_condition (code);
9744 if (flag_force_mem)
9746 op0 = force_not_mem (op0);
9747 op1 = force_not_mem (op1);
9750 do_pending_stack_adjust ();
9752 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9753 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9754 return tem;
9756 #if 0
9757 /* There's no need to do this now that combine.c can eliminate lots of
9758 sign extensions. This can be less efficient in certain cases on other
9759 machines. */
9761 /* If this is a signed equality comparison, we can do it as an
9762 unsigned comparison since zero-extension is cheaper than sign
9763 extension and comparisons with zero are done as unsigned. This is
9764 the case even on machines that can do fast sign extension, since
9765 zero-extension is easier to combine with other operations than
9766 sign-extension is. If we are comparing against a constant, we must
9767 convert it to what it would look like unsigned. */
9768 if ((code == EQ || code == NE) && ! unsignedp
9769 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9771 if (GET_CODE (op1) == CONST_INT
9772 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9773 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9774 unsignedp = 1;
9776 #endif
9778 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9780 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9783 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9784 The decision as to signed or unsigned comparison must be made by the caller.
9786 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9787 compared. */
9789 void
9790 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9791 if_false_label, if_true_label)
9792 rtx op0, op1;
9793 enum rtx_code code;
9794 int unsignedp;
9795 enum machine_mode mode;
9796 rtx size;
9797 rtx if_false_label, if_true_label;
9799 rtx tem;
9800 int dummy_true_label = 0;
9802 /* Reverse the comparison if that is safe and we want to jump if it is
9803 false. */
9804 if (! if_true_label && ! FLOAT_MODE_P (mode))
9806 if_true_label = if_false_label;
9807 if_false_label = 0;
9808 code = reverse_condition (code);
9811 /* If one operand is constant, make it the second one. Only do this
9812 if the other operand is not constant as well. */
9814 if (swap_commutative_operands_p (op0, op1))
9816 tem = op0;
9817 op0 = op1;
9818 op1 = tem;
9819 code = swap_condition (code);
9822 if (flag_force_mem)
9824 op0 = force_not_mem (op0);
9825 op1 = force_not_mem (op1);
9828 do_pending_stack_adjust ();
9830 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9831 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9833 if (tem == const_true_rtx)
9835 if (if_true_label)
9836 emit_jump (if_true_label);
9838 else
9840 if (if_false_label)
9841 emit_jump (if_false_label);
9843 return;
9846 #if 0
9847 /* There's no need to do this now that combine.c can eliminate lots of
9848 sign extensions. This can be less efficient in certain cases on other
9849 machines. */
9851 /* If this is a signed equality comparison, we can do it as an
9852 unsigned comparison since zero-extension is cheaper than sign
9853 extension and comparisons with zero are done as unsigned. This is
9854 the case even on machines that can do fast sign extension, since
9855 zero-extension is easier to combine with other operations than
9856 sign-extension is. If we are comparing against a constant, we must
9857 convert it to what it would look like unsigned. */
9858 if ((code == EQ || code == NE) && ! unsignedp
9859 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9861 if (GET_CODE (op1) == CONST_INT
9862 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9863 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9864 unsignedp = 1;
9866 #endif
9868 if (! if_true_label)
9870 dummy_true_label = 1;
9871 if_true_label = gen_label_rtx ();
9874 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
9875 if_true_label);
9877 if (if_false_label)
9878 emit_jump (if_false_label);
9879 if (dummy_true_label)
9880 emit_label (if_true_label);
9883 /* Generate code for a comparison expression EXP (including code to compute
9884 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9885 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9886 generated code will drop through.
9887 SIGNED_CODE should be the rtx operation for this comparison for
9888 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9890 We force a stack adjustment unless there are currently
9891 things pushed on the stack that aren't yet used. */
9893 static void
9894 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9895 if_true_label)
9896 tree exp;
9897 enum rtx_code signed_code, unsigned_code;
9898 rtx if_false_label, if_true_label;
9900 rtx op0, op1;
9901 tree type;
9902 enum machine_mode mode;
9903 int unsignedp;
9904 enum rtx_code code;
9906 /* Don't crash if the comparison was erroneous. */
9907 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9908 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9909 return;
9911 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9912 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9913 return;
9915 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9916 mode = TYPE_MODE (type);
9917 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9918 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9919 || (GET_MODE_BITSIZE (mode)
9920 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
9921 1)))))))
9923 /* op0 might have been replaced by a promoted constant, in which
9924 case the type of the second argument should be used. */
9925 type = TREE_TYPE (TREE_OPERAND (exp, 1));
9926 mode = TYPE_MODE (type);
9928 unsignedp = TREE_UNSIGNED (type);
9929 code = unsignedp ? unsigned_code : signed_code;
9931 #ifdef HAVE_canonicalize_funcptr_for_compare
9932 /* If function pointers need to be "canonicalized" before they can
9933 be reliably compared, then canonicalize them. */
9934 if (HAVE_canonicalize_funcptr_for_compare
9935 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9936 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9937 == FUNCTION_TYPE))
9939 rtx new_op0 = gen_reg_rtx (mode);
9941 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9942 op0 = new_op0;
9945 if (HAVE_canonicalize_funcptr_for_compare
9946 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9947 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9948 == FUNCTION_TYPE))
9950 rtx new_op1 = gen_reg_rtx (mode);
9952 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9953 op1 = new_op1;
9955 #endif
9957 /* Do any postincrements in the expression that was tested. */
9958 emit_queue ();
9960 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9961 ((mode == BLKmode)
9962 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9963 if_false_label, if_true_label);
9966 /* Generate code to calculate EXP using a store-flag instruction
9967 and return an rtx for the result. EXP is either a comparison
9968 or a TRUTH_NOT_EXPR whose operand is a comparison.
9970 If TARGET is nonzero, store the result there if convenient.
9972 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9973 cheap.
9975 Return zero if there is no suitable set-flag instruction
9976 available on this machine.
9978 Once expand_expr has been called on the arguments of the comparison,
9979 we are committed to doing the store flag, since it is not safe to
9980 re-evaluate the expression. We emit the store-flag insn by calling
9981 emit_store_flag, but only expand the arguments if we have a reason
9982 to believe that emit_store_flag will be successful. If we think it
9983 will succeed but it turns out not to, we have to simulate the
9984 store-flag with a set/jump/set sequence. */
9986 static rtx
9987 do_store_flag (exp, target, mode, only_cheap)
9988 tree exp;
9989 rtx target;
9990 enum machine_mode mode;
9991 int only_cheap;
9993 enum rtx_code code;
9994 tree arg0, arg1, type;
9995 tree tem;
9996 enum machine_mode operand_mode;
9997 int invert = 0;
9998 int unsignedp;
9999 rtx op0, op1;
10000 enum insn_code icode;
10001 rtx subtarget = target;
10002 rtx result, label;
10004 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10005 result at the end. We can't simply invert the test since it would
10006 have already been inverted if it were valid. This case occurs for
10007 some floating-point comparisons. */
10009 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10010 invert = 1, exp = TREE_OPERAND (exp, 0);
10012 arg0 = TREE_OPERAND (exp, 0);
10013 arg1 = TREE_OPERAND (exp, 1);
10015 /* Don't crash if the comparison was erroneous. */
10016 if (arg0 == error_mark_node || arg1 == error_mark_node)
10017 return const0_rtx;
10019 type = TREE_TYPE (arg0);
10020 operand_mode = TYPE_MODE (type);
10021 unsignedp = TREE_UNSIGNED (type);
10023 /* We won't bother with BLKmode store-flag operations because it would mean
10024 passing a lot of information to emit_store_flag. */
10025 if (operand_mode == BLKmode)
10026 return 0;
10028 /* We won't bother with store-flag operations involving function pointers
10029 when function pointers must be canonicalized before comparisons. */
10030 #ifdef HAVE_canonicalize_funcptr_for_compare
10031 if (HAVE_canonicalize_funcptr_for_compare
10032 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10033 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10034 == FUNCTION_TYPE))
10035 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10036 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10037 == FUNCTION_TYPE))))
10038 return 0;
10039 #endif
10041 STRIP_NOPS (arg0);
10042 STRIP_NOPS (arg1);
10044 /* Get the rtx comparison code to use. We know that EXP is a comparison
10045 operation of some type. Some comparisons against 1 and -1 can be
10046 converted to comparisons with zero. Do so here so that the tests
10047 below will be aware that we have a comparison with zero. These
10048 tests will not catch constants in the first operand, but constants
10049 are rarely passed as the first operand. */
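/* For example, unsigned `x < 1' is treated below as `x <= 0' (LEU), and
signed `x > -1' as `x >= 0' (GE), so the comparison-with-zero and
single-bit shortcuts further down can apply.  */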
10051 switch (TREE_CODE (exp))
10053 case EQ_EXPR:
10054 code = EQ;
10055 break;
10056 case NE_EXPR:
10057 code = NE;
10058 break;
10059 case LT_EXPR:
10060 if (integer_onep (arg1))
10061 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10062 else
10063 code = unsignedp ? LTU : LT;
10064 break;
10065 case LE_EXPR:
10066 if (! unsignedp && integer_all_onesp (arg1))
10067 arg1 = integer_zero_node, code = LT;
10068 else
10069 code = unsignedp ? LEU : LE;
10070 break;
10071 case GT_EXPR:
10072 if (! unsignedp && integer_all_onesp (arg1))
10073 arg1 = integer_zero_node, code = GE;
10074 else
10075 code = unsignedp ? GTU : GT;
10076 break;
10077 case GE_EXPR:
10078 if (integer_onep (arg1))
10079 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10080 else
10081 code = unsignedp ? GEU : GE;
10082 break;
10084 case UNORDERED_EXPR:
10085 code = UNORDERED;
10086 break;
10087 case ORDERED_EXPR:
10088 code = ORDERED;
10089 break;
10090 case UNLT_EXPR:
10091 code = UNLT;
10092 break;
10093 case UNLE_EXPR:
10094 code = UNLE;
10095 break;
10096 case UNGT_EXPR:
10097 code = UNGT;
10098 break;
10099 case UNGE_EXPR:
10100 code = UNGE;
10101 break;
10102 case UNEQ_EXPR:
10103 code = UNEQ;
10104 break;
10106 default:
10107 abort ();
10110 /* Put a constant second. */
10111 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10113 tem = arg0; arg0 = arg1; arg1 = tem;
10114 code = swap_condition (code);
10117 /* If this is an equality or inequality test of a single bit, we can
10118 do this by shifting the bit being tested to the low-order bit and
10119 masking the result with the constant 1. If the condition was EQ,
10120 we xor it with 1. This does not require an scc insn and is faster
10121 than an scc insn even if we have it. */
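/* For example, `(x & 4) != 0' is computed as `(x >> 2) & 1', and
`(x & 4) == 0' as `((x >> 2) ^ 1) & 1'; when the bit tested is the sign
bit, the shift is done unsigned and the final AND is omitted.  */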
10123 if ((code == NE || code == EQ)
10124 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10125 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10127 tree inner = TREE_OPERAND (arg0, 0);
10128 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10129 int ops_unsignedp;
10131 /* If INNER is a right shift of a constant and it plus BITNUM does
10132 not overflow, adjust BITNUM and INNER. */
10134 if (TREE_CODE (inner) == RSHIFT_EXPR
10135 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10136 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10137 && bitnum < TYPE_PRECISION (type)
10138 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10139 bitnum - TYPE_PRECISION (type)))
10141 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10142 inner = TREE_OPERAND (inner, 0);
10145 /* If we are going to be able to omit the AND below, we must do our
10146 operations as unsigned. If we must use the AND, we have a choice.
10147 Normally unsigned is faster, but for some machines signed is. */
10148 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10149 #ifdef LOAD_EXTEND_OP
10150 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10151 #else
10153 #endif
10156 if (! get_subtarget (subtarget)
10157 || GET_MODE (subtarget) != operand_mode
10158 || ! safe_from_p (subtarget, inner, 1))
10159 subtarget = 0;
10161 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10163 if (bitnum != 0)
10164 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10165 size_int (bitnum), subtarget, ops_unsignedp);
10167 if (GET_MODE (op0) != mode)
10168 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10170 if ((code == EQ && ! invert) || (code == NE && invert))
10171 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10172 ops_unsignedp, OPTAB_LIB_WIDEN);
10174 /* Put the AND last so it can combine with more things. */
10175 if (bitnum != TYPE_PRECISION (type) - 1)
10176 op0 = expand_and (op0, const1_rtx, subtarget);
10178 return op0;
10181 /* Now see if we are likely to be able to do this. Return if not. */
10182 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10183 return 0;
10185 icode = setcc_gen_code[(int) code];
10186 if (icode == CODE_FOR_nothing
10187 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10189 /* We can only do this if it is one of the special cases that
10190 can be handled without an scc insn. */
10191 if ((code == LT && integer_zerop (arg1))
10192 || (! only_cheap && code == GE && integer_zerop (arg1)))
10194 else if (BRANCH_COST >= 0
10195 && ! only_cheap && (code == NE || code == EQ)
10196 && TREE_CODE (type) != REAL_TYPE
10197 && ((abs_optab->handlers[(int) operand_mode].insn_code
10198 != CODE_FOR_nothing)
10199 || (ffs_optab->handlers[(int) operand_mode].insn_code
10200 != CODE_FOR_nothing)))
10202 else
10203 return 0;
10206 if (! get_subtarget (target)
10207 || GET_MODE (subtarget) != operand_mode
10208 || ! safe_from_p (subtarget, arg1, 1))
10209 subtarget = 0;
10211 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10212 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10214 if (target == 0)
10215 target = gen_reg_rtx (mode);
10217 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10218 because, if emit_store_flag does anything at all, it will succeed and
10219 OP0 and OP1 will not be used subsequently. */
10221 result = emit_store_flag (target, code,
10222 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10223 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10224 operand_mode, unsignedp, 1);
10226 if (result)
10228 if (invert)
10229 result = expand_binop (mode, xor_optab, result, const1_rtx,
10230 result, 0, OPTAB_LIB_WIDEN);
10231 return result;
10234 /* If this failed, we have to do this with set/compare/jump/set code. */
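/* The fallback emitted below is, for the non-inverted case:
target = 1; if (op0 <code> op1) goto L; target = 0; L:  */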
10235 if (GET_CODE (target) != REG
10236 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10237 target = gen_reg_rtx (GET_MODE (target));
10239 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10240 result = compare_from_rtx (op0, op1, code, unsignedp,
10241 operand_mode, NULL_RTX);
10242 if (GET_CODE (result) == CONST_INT)
10243 return (((result == const0_rtx && ! invert)
10244 || (result != const0_rtx && invert))
10245 ? const0_rtx : const1_rtx);
10247 /* The code of RESULT may not match CODE if compare_from_rtx
10248 decided to swap its operands and reverse the original code.
10250 We know that compare_from_rtx returns either a CONST_INT or
10251 a new comparison code, so it is safe to just extract the
10252 code from RESULT. */
10253 code = GET_CODE (result);
10255 label = gen_label_rtx ();
10256 if (bcc_gen_fctn[(int) code] == 0)
10257 abort ();
10259 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10260 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10261 emit_label (label);
10263 return target;
10267 /* Stubs in case we haven't got a casesi insn. */
10268 #ifndef HAVE_casesi
10269 # define HAVE_casesi 0
10270 # define gen_casesi(a, b, c, d, e) (0)
10271 # define CODE_FOR_casesi CODE_FOR_nothing
10272 #endif
10274 /* If the machine does not have a case insn that compares the bounds,
10275 this means extra overhead for dispatch tables, which raises the
10276 threshold for using them. */
10277 #ifndef CASE_VALUES_THRESHOLD
10278 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10279 #endif /* CASE_VALUES_THRESHOLD */
10281 unsigned int
10282 case_values_threshold ()
10284 return CASE_VALUES_THRESHOLD;
10287 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10288 0 otherwise (i.e. if there is no casesi instruction). */
10290 try_casesi (index_type, index_expr, minval, range,
10291 table_label, default_label)
10292 tree index_type, index_expr, minval, range;
10293 rtx table_label ATTRIBUTE_UNUSED;
10294 rtx default_label;
10296 enum machine_mode index_mode = SImode;
10297 int index_bits = GET_MODE_BITSIZE (index_mode);
10298 rtx op1, op2, index;
10299 enum machine_mode op_mode;
10301 if (! HAVE_casesi)
10302 return 0;
10304 /* Convert the index to SImode. */
10305 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10307 enum machine_mode omode = TYPE_MODE (index_type);
10308 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10310 /* We must handle the endpoints in the original mode. */
10311 index_expr = build (MINUS_EXPR, index_type,
10312 index_expr, minval);
10313 minval = integer_zero_node;
10314 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10315 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10316 omode, 1, default_label);
10317 /* Now we can safely truncate. */
10318 index = convert_to_mode (index_mode, index, 0);
10320 else
10322 if (TYPE_MODE (index_type) != index_mode)
10324 index_expr = convert (type_for_size (index_bits, 0),
10325 index_expr);
10326 index_type = TREE_TYPE (index_expr);
10329 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10331 emit_queue ();
10332 index = protect_from_queue (index, 0);
10333 do_pending_stack_adjust ();
10335 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10336 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10337 (index, op_mode))
10338 index = copy_to_mode_reg (op_mode, index);
10340 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10342 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10343 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10344 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10345 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10346 (op1, op_mode))
10347 op1 = copy_to_mode_reg (op_mode, op1);
10349 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10351 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10352 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10353 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10354 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10355 (op2, op_mode))
10356 op2 = copy_to_mode_reg (op_mode, op2);
10358 emit_jump_insn (gen_casesi (index, op1, op2,
10359 table_label, default_label));
10360 return 1;
10363 /* Attempt to generate a tablejump instruction; same concept. */
10364 #ifndef HAVE_tablejump
10365 #define HAVE_tablejump 0
10366 #define gen_tablejump(x, y) (0)
10367 #endif
10369 /* Subroutine of the next function.
10371 INDEX is the value being switched on, with the lowest value
10372 in the table already subtracted.
10373 MODE is its expected mode (needed if INDEX is constant).
10374 RANGE is the length of the jump table.
10375 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10377 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10378 index value is out of range. */
10380 static void
10381 do_tablejump (index, mode, range, table_label, default_label)
10382 rtx index, range, table_label, default_label;
10383 enum machine_mode mode;
10385 rtx temp, vector;
10387 /* Do an unsigned comparison (in the proper mode) between the index
10388 expression and the value which represents the length of the range.
10389 Since we just finished subtracting the lower bound of the range
10390 from the index expression, this comparison allows us to simultaneously
10391 check that the original index expression value is both greater than
10392 or equal to the minimum value of the range and less than or equal to
10393 the maximum value of the range. */
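/* For example, if the case values run from 10 to 20, the caller has
already subtracted 10, so one unsigned comparison against the range
sends both an original index of 9 (which wrapped around to a huge
unsigned value) and an original index of 21 to DEFAULT_LABEL.  */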
10395 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10396 default_label);
10398 /* If index is in range, it must fit in Pmode.
10399 Convert to Pmode so we can index with it. */
10400 if (mode != Pmode)
10401 index = convert_to_mode (Pmode, index, 1);
10403 /* Don't let a MEM slip through, because then the INDEX that comes
10404 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10405 and break_out_memory_refs will go to work on it and mess it up. */
10406 #ifdef PIC_CASE_VECTOR_ADDRESS
10407 if (flag_pic && GET_CODE (index) != REG)
10408 index = copy_to_mode_reg (Pmode, index);
10409 #endif
10411 /* If flag_force_addr were to affect this address
10412 it could interfere with the tricky assumptions made
10413 about addresses that contain label-refs,
10414 which may be valid only very near the tablejump itself. */
10415 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10416 GET_MODE_SIZE, because this indicates how large insns are. The other
10417 uses should all be Pmode, because they are addresses. This code
10418 could fail if addresses and insns are not the same size. */
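/* The dispatch address computed below is
TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE),
i.e. the index scaled by the size of one table entry; on PIC targets
PIC_CASE_VECTOR_ADDRESS then adjusts this address as the target
requires.  */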
10419 index = gen_rtx_PLUS (Pmode,
10420 gen_rtx_MULT (Pmode, index,
10421 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10422 gen_rtx_LABEL_REF (Pmode, table_label));
10423 #ifdef PIC_CASE_VECTOR_ADDRESS
10424 if (flag_pic)
10425 index = PIC_CASE_VECTOR_ADDRESS (index);
10426 else
10427 #endif
10428 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10429 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10430 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10431 RTX_UNCHANGING_P (vector) = 1;
10432 convert_move (temp, vector, 0);
10434 emit_jump_insn (gen_tablejump (temp, table_label));
10436 /* If we are generating PIC code or if the table is PC-relative, the
10437 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10438 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10439 emit_barrier ();
10443 try_tablejump (index_type, index_expr, minval, range,
10444 table_label, default_label)
10445 tree index_type, index_expr, minval, range;
10446 rtx table_label, default_label;
10448 rtx index;
10450 if (! HAVE_tablejump)
10451 return 0;
10453 index_expr = fold (build (MINUS_EXPR, index_type,
10454 convert (index_type, index_expr),
10455 convert (index_type, minval)));
10456 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10457 emit_queue ();
10458 index = protect_from_queue (index, 0);
10459 do_pending_stack_adjust ();
10461 do_tablejump (index, TYPE_MODE (index_type),
10462 convert_modes (TYPE_MODE (index_type),
10463 TYPE_MODE (TREE_TYPE (range)),
10464 expand_expr (range, NULL_RTX,
10465 VOIDmode, 0),
10466 TREE_UNSIGNED (TREE_TYPE (range))),
10467 table_label, default_label);
10468 return 1;