1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
55 #ifdef PUSH_ROUNDING
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first. */
59 #endif
61 #endif
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
74 #endif
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
82 int cse_not_expected;
84 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
85 static tree placeholder_list = 0;
87 /* This structure is used by move_by_pieces to describe the move to
88 be performed. */
89 struct move_by_pieces
91 rtx to;
92 rtx to_addr;
93 int autinc_to;
94 int explicit_inc_to;
95 rtx from;
96 rtx from_addr;
97 int autinc_from;
98 int explicit_inc_from;
99 unsigned HOST_WIDE_INT len;
100 HOST_WIDE_INT offset;
101 int reverse;
104 /* This structure is used by store_by_pieces to describe the clear to
105 be performed. */
107 struct store_by_pieces
109 rtx to;
110 rtx to_addr;
111 int autinc_to;
112 int explicit_inc_to;
113 unsigned HOST_WIDE_INT len;
114 HOST_WIDE_INT offset;
115 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
116 PTR constfundata;
117 int reverse;
120 extern struct obstack permanent_obstack;
122 static rtx enqueue_insn PARAMS ((rtx, rtx));
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
124 PARAMS ((unsigned HOST_WIDE_INT,
125 unsigned int));
126 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *));
128 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
129 enum machine_mode));
130 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
131 unsigned int));
132 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
133 unsigned int));
134 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
135 enum machine_mode,
136 struct store_by_pieces *));
137 static rtx get_subtarget PARAMS ((rtx));
138 static int is_zeros_p PARAMS ((tree));
139 static int mostly_zeros_p PARAMS ((tree));
140 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int));
143 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
144 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
145 HOST_WIDE_INT, enum machine_mode,
146 tree, enum machine_mode, int, tree,
147 int));
148 static rtx var_rtx PARAMS ((tree));
149 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
150 static rtx expand_increment PARAMS ((tree, int, int));
151 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
152 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
153 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
154 rtx, rtx));
155 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
158 #endif
159 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
168 /* If a memory-to-memory move would take MOVE_RATIO or more simple
169 move-instruction sequences, we will do a movstr or libcall instead. */
171 #ifndef MOVE_RATIO
172 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
173 #define MOVE_RATIO 2
174 #else
175 /* If we are optimizing for space (-Os), cut down the default move ratio. */
176 #define MOVE_RATIO (optimize_size ? 3 : 15)
177 #endif
178 #endif
180 /* This macro is used to determine whether move_by_pieces should be called
181 to perform a structure copy. */
182 #ifndef MOVE_BY_PIECES_P
183 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
185 #endif
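/* Illustrative sketch, not part of the original file: how the
   heuristic above is consulted.  With word-aligned operands on a
   32-bit target, a 16-byte copy costs four SImode moves, so it is
   done inline only if that count is below MOVE_RATIO.  */
#if 0
static int
example_copy_inline_p (size, align)
     unsigned HOST_WIDE_INT size;
     unsigned int align;
{
  return MOVE_BY_PIECES_P (size, align);
}
#endif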
187 /* This array records the insn_code of insns to perform block moves. */
188 enum insn_code movstr_optab[NUM_MACHINE_MODES];
190 /* This array records the insn_code of insns to perform block clears. */
191 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
193 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
195 #ifndef SLOW_UNALIGNED_ACCESS
196 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
197 #endif
199 /* This is run once per compilation to set up which modes can be used
200 directly in memory and to initialize the block move optab. */
202 void
203 init_expr_once ()
205 rtx insn, pat;
206 enum machine_mode mode;
207 int num_clobbers;
208 rtx mem, mem1;
210 start_sequence ();
212 /* Try indexing by frame ptr and try by stack ptr.
213 It is known that on the Convex the stack ptr isn't a valid index.
214 With luck, one or the other is valid on any machine. */
215 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
216 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
218 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
219 pat = PATTERN (insn);
221 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
222 mode = (enum machine_mode) ((int) mode + 1))
224 int regno;
225 rtx reg;
227 direct_load[(int) mode] = direct_store[(int) mode] = 0;
228 PUT_MODE (mem, mode);
229 PUT_MODE (mem1, mode);
231 /* See if there is some register that can be used in this mode and
232 directly loaded or stored from memory. */
234 if (mode != VOIDmode && mode != BLKmode)
235 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
236 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
237 regno++)
239 if (! HARD_REGNO_MODE_OK (regno, mode))
240 continue;
242 reg = gen_rtx_REG (mode, regno);
244 SET_SRC (pat) = mem;
245 SET_DEST (pat) = reg;
246 if (recog (pat, insn, &num_clobbers) >= 0)
247 direct_load[(int) mode] = 1;
249 SET_SRC (pat) = mem1;
250 SET_DEST (pat) = reg;
251 if (recog (pat, insn, &num_clobbers) >= 0)
252 direct_load[(int) mode] = 1;
254 SET_SRC (pat) = reg;
255 SET_DEST (pat) = mem;
256 if (recog (pat, insn, &num_clobbers) >= 0)
257 direct_store[(int) mode] = 1;
259 SET_SRC (pat) = reg;
260 SET_DEST (pat) = mem1;
261 if (recog (pat, insn, &num_clobbers) >= 0)
262 direct_store[(int) mode] = 1;
266 end_sequence ();
269 /* This is run at the start of compiling a function. */
271 void
272 init_expr ()
274 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
276 pending_chain = 0;
277 pending_stack_adjust = 0;
278 stack_pointer_delta = 0;
279 inhibit_defer_pop = 0;
280 saveregs_value = 0;
281 apply_args_value = 0;
282 forced_labels = 0;
285 void
286 mark_expr_status (p)
287 struct expr_status *p;
289 if (p == NULL)
290 return;
292 ggc_mark_rtx (p->x_saveregs_value);
293 ggc_mark_rtx (p->x_apply_args_value);
294 ggc_mark_rtx (p->x_forced_labels);
297 void
298 free_expr_status (f)
299 struct function *f;
301 free (f->expr);
302 f->expr = NULL;
305 /* Small sanity check that the queue is empty at the end of a function. */
307 void
308 finish_expr_for_function ()
310 if (pending_chain)
311 abort ();
314 /* Manage the queue of increment instructions to be output
315 for POSTINCREMENT_EXPR expressions, etc. */
317 /* Queue up to increment (or change) VAR later. BODY says how:
318 BODY should be the same thing you would pass to emit_insn
319 to increment right away. It will go to emit_insn later on.
321 The value is a QUEUED expression to be used in place of VAR
322 where you want to guarantee the pre-incrementation value of VAR. */
324 static rtx
325 enqueue_insn (var, body)
326 rtx var, body;
328 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
329 body, pending_chain);
330 return pending_chain;
333 /* Use protect_from_queue to convert a QUEUED expression
334 into something that you can put immediately into an instruction.
335 If the queued incrementation has not happened yet,
336 protect_from_queue returns the variable itself.
337 If the incrementation has happened, protect_from_queue returns a temp
338 that contains a copy of the old value of the variable.
340 Any time an rtx which might possibly be a QUEUED is to be put
341 into an instruction, it must be passed through protect_from_queue first.
342 QUEUED expressions are not meaningful in instructions.
344 Do not pass a value through protect_from_queue and then hold
345 on to it for a while before putting it in an instruction!
346 If the queue is flushed in between, incorrect code will result. */
348 rtx
349 protect_from_queue (x, modify)
350 rtx x;
351 int modify;
353 RTX_CODE code = GET_CODE (x);
355 #if 0 /* A QUEUED can hang around after the queue is forced out. */
356 /* Shortcut for most common case. */
357 if (pending_chain == 0)
358 return x;
359 #endif
361 if (code != QUEUED)
363 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
364 use of autoincrement. Make a copy of the contents of the memory
365 location rather than a copy of the address, but not if the value is
366 of mode BLKmode. Don't modify X in place since it might be
367 shared. */
368 if (code == MEM && GET_MODE (x) != BLKmode
369 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
371 rtx y = XEXP (x, 0);
372 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
374 if (QUEUED_INSN (y))
376 rtx temp = gen_reg_rtx (GET_MODE (x));
378 emit_insn_before (gen_move_insn (temp, new),
379 QUEUED_INSN (y));
380 return temp;
383 /* Copy the address into a pseudo, so that the returned value
384 remains correct across calls to emit_queue. */
385 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
388 /* Otherwise, recursively protect the subexpressions of all
389 the kinds of rtx's that can contain a QUEUED. */
390 if (code == MEM)
392 rtx tem = protect_from_queue (XEXP (x, 0), 0);
393 if (tem != XEXP (x, 0))
395 x = copy_rtx (x);
396 XEXP (x, 0) = tem;
399 else if (code == PLUS || code == MULT)
401 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
402 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
403 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
405 x = copy_rtx (x);
406 XEXP (x, 0) = new0;
407 XEXP (x, 1) = new1;
410 return x;
412 /* If the increment has not happened, use the variable itself. Copy it
413 into a new pseudo so that the value remains correct across calls to
414 emit_queue. */
415 if (QUEUED_INSN (x) == 0)
416 return copy_to_reg (QUEUED_VAR (x));
417 /* If the increment has happened and a pre-increment copy exists,
418 use that copy. */
419 if (QUEUED_COPY (x) != 0)
420 return QUEUED_COPY (x);
421 /* The increment has happened but we haven't set up a pre-increment copy.
422 Set one up now, and use it. */
423 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
424 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
425 QUEUED_INSN (x));
426 return QUEUED_COPY (x);
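/* Illustrative sketch, not part of the original file: the usual
   pattern for a consumer of the queue.  Any rtx that may contain a
   QUEUED is passed through protect_from_queue immediately before it
   is placed in an insn, and emit_queue is called at the next sequence
   point so the pending increments are flushed.  OP0 and TARGET are
   hypothetical values supplied by a caller.  */
#if 0
static void
example_use_of_queue (op0, target)
     rtx op0, target;
{
  op0 = protect_from_queue (op0, 0);		/* 0: read access only.  */
  target = protect_from_queue (target, 1);	/* 1: will be written.  */

  emit_move_insn (target, op0);

  /* Flush any queued post-increments before the next statement.  */
  emit_queue ();
}
#endif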
429 /* Return nonzero if X contains a QUEUED expression:
430 if it contains anything that will be altered by a queued increment.
431 We handle only combinations of MEM, PLUS, MINUS and MULT operators
432 since memory addresses generally contain only those. */
434 int
435 queued_subexp_p (x)
436 rtx x;
438 enum rtx_code code = GET_CODE (x);
439 switch (code)
441 case QUEUED:
442 return 1;
443 case MEM:
444 return queued_subexp_p (XEXP (x, 0));
445 case MULT:
446 case PLUS:
447 case MINUS:
448 return (queued_subexp_p (XEXP (x, 0))
449 || queued_subexp_p (XEXP (x, 1)));
450 default:
451 return 0;
455 /* Perform all the pending incrementations. */
457 void
458 emit_queue ()
460 rtx p;
461 while ((p = pending_chain))
463 rtx body = QUEUED_BODY (p);
465 if (GET_CODE (body) == SEQUENCE)
467 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
468 emit_insn (QUEUED_BODY (p));
470 else
471 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
472 pending_chain = QUEUED_NEXT (p);
476 /* Copy data from FROM to TO, where the machine modes are not the same.
477 Both modes may be integer, or both may be floating.
478 UNSIGNEDP should be nonzero if FROM is an unsigned type.
479 This causes zero-extension instead of sign-extension. */
481 void
482 convert_move (to, from, unsignedp)
483 rtx to, from;
484 int unsignedp;
486 enum machine_mode to_mode = GET_MODE (to);
487 enum machine_mode from_mode = GET_MODE (from);
488 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
489 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
490 enum insn_code code;
491 rtx libcall;
493 /* rtx code for making an equivalent value. */
494 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
496 to = protect_from_queue (to, 1);
497 from = protect_from_queue (from, 0);
499 if (to_real != from_real)
500 abort ();
502 /* If FROM is a SUBREG that indicates that we have already done at least
503 the required extension, strip it. We don't handle such SUBREGs as
504 TO here. */
506 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
507 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
508 >= GET_MODE_SIZE (to_mode))
509 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
510 from = gen_lowpart (to_mode, from), from_mode = to_mode;
512 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
513 abort ();
515 if (to_mode == from_mode
516 || (from_mode == VOIDmode && CONSTANT_P (from)))
518 emit_move_insn (to, from);
519 return;
522 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
524 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
525 abort ();
527 if (VECTOR_MODE_P (to_mode))
528 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
529 else
530 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
532 emit_move_insn (to, from);
533 return;
536 if (to_real != from_real)
537 abort ();
539 if (to_real)
541 rtx value, insns;
543 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
545 /* Try converting directly if the insn is supported. */
546 if ((code = can_extend_p (to_mode, from_mode, 0))
547 != CODE_FOR_nothing)
549 emit_unop_insn (code, to, from, UNKNOWN);
550 return;
554 #ifdef HAVE_trunchfqf2
555 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
557 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
558 return;
560 #endif
561 #ifdef HAVE_trunctqfqf2
562 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
564 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
565 return;
567 #endif
568 #ifdef HAVE_truncsfqf2
569 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
571 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
572 return;
574 #endif
575 #ifdef HAVE_truncdfqf2
576 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
578 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
579 return;
581 #endif
582 #ifdef HAVE_truncxfqf2
583 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
585 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
586 return;
588 #endif
589 #ifdef HAVE_trunctfqf2
590 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
592 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
593 return;
595 #endif
597 #ifdef HAVE_trunctqfhf2
598 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
600 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
601 return;
603 #endif
604 #ifdef HAVE_truncsfhf2
605 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
607 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
608 return;
610 #endif
611 #ifdef HAVE_truncdfhf2
612 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
614 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
615 return;
617 #endif
618 #ifdef HAVE_truncxfhf2
619 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
621 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
622 return;
624 #endif
625 #ifdef HAVE_trunctfhf2
626 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
628 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
629 return;
631 #endif
633 #ifdef HAVE_truncsftqf2
634 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
636 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
637 return;
639 #endif
640 #ifdef HAVE_truncdftqf2
641 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
643 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
644 return;
646 #endif
647 #ifdef HAVE_truncxftqf2
648 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
650 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
651 return;
653 #endif
654 #ifdef HAVE_trunctftqf2
655 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
657 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
658 return;
660 #endif
662 #ifdef HAVE_truncdfsf2
663 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
665 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
666 return;
668 #endif
669 #ifdef HAVE_truncxfsf2
670 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
672 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
673 return;
675 #endif
676 #ifdef HAVE_trunctfsf2
677 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
679 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
680 return;
682 #endif
683 #ifdef HAVE_truncxfdf2
684 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
686 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
687 return;
689 #endif
690 #ifdef HAVE_trunctfdf2
691 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
693 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
694 return;
696 #endif
698 libcall = (rtx) 0;
699 switch (from_mode)
701 case SFmode:
702 switch (to_mode)
704 case DFmode:
705 libcall = extendsfdf2_libfunc;
706 break;
708 case XFmode:
709 libcall = extendsfxf2_libfunc;
710 break;
712 case TFmode:
713 libcall = extendsftf2_libfunc;
714 break;
716 default:
717 break;
719 break;
721 case DFmode:
722 switch (to_mode)
724 case SFmode:
725 libcall = truncdfsf2_libfunc;
726 break;
728 case XFmode:
729 libcall = extenddfxf2_libfunc;
730 break;
732 case TFmode:
733 libcall = extenddftf2_libfunc;
734 break;
736 default:
737 break;
739 break;
741 case XFmode:
742 switch (to_mode)
744 case SFmode:
745 libcall = truncxfsf2_libfunc;
746 break;
748 case DFmode:
749 libcall = truncxfdf2_libfunc;
750 break;
752 default:
753 break;
755 break;
757 case TFmode:
758 switch (to_mode)
760 case SFmode:
761 libcall = trunctfsf2_libfunc;
762 break;
764 case DFmode:
765 libcall = trunctfdf2_libfunc;
766 break;
768 default:
769 break;
771 break;
773 default:
774 break;
777 if (libcall == (rtx) 0)
778 /* This conversion is not implemented yet. */
779 abort ();
781 start_sequence ();
782 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
783 1, from, from_mode);
784 insns = get_insns ();
785 end_sequence ();
786 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
787 from));
788 return;
791 /* Now both modes are integers. */
793 /* Handle expanding beyond a word. */
794 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
795 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
797 rtx insns;
798 rtx lowpart;
799 rtx fill_value;
800 rtx lowfrom;
801 int i;
802 enum machine_mode lowpart_mode;
803 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
805 /* Try converting directly if the insn is supported. */
806 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
807 != CODE_FOR_nothing)
809 /* If FROM is a SUBREG, put it into a register. Do this
810 so that we always generate the same set of insns for
811 better cse'ing; if an intermediate assignment occurred,
812 we won't be doing the operation directly on the SUBREG. */
813 if (optimize > 0 && GET_CODE (from) == SUBREG)
814 from = force_reg (from_mode, from);
815 emit_unop_insn (code, to, from, equiv_code);
816 return;
818 /* Next, try converting via full word. */
819 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
820 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
821 != CODE_FOR_nothing))
823 if (GET_CODE (to) == REG)
824 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
825 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
826 emit_unop_insn (code, to,
827 gen_lowpart (word_mode, to), equiv_code);
828 return;
831 /* No special multiword conversion insn; do it by hand. */
832 start_sequence ();
834 /* Since we will turn this into a no conflict block, we must ensure
835 that the source does not overlap the target. */
837 if (reg_overlap_mentioned_p (to, from))
838 from = force_reg (from_mode, from);
840 /* Get a copy of FROM widened to a word, if necessary. */
841 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
842 lowpart_mode = word_mode;
843 else
844 lowpart_mode = from_mode;
846 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
848 lowpart = gen_lowpart (lowpart_mode, to);
849 emit_move_insn (lowpart, lowfrom);
851 /* Compute the value to put in each remaining word. */
852 if (unsignedp)
853 fill_value = const0_rtx;
854 else
856 #ifdef HAVE_slt
857 if (HAVE_slt
858 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
859 && STORE_FLAG_VALUE == -1)
861 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
862 lowpart_mode, 0);
863 fill_value = gen_reg_rtx (word_mode);
864 emit_insn (gen_slt (fill_value));
866 else
867 #endif
869 fill_value
870 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
871 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
872 NULL_RTX, 0);
873 fill_value = convert_to_mode (word_mode, fill_value, 1);
877 /* Fill the remaining words. */
878 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
880 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
881 rtx subword = operand_subword (to, index, 1, to_mode);
883 if (subword == 0)
884 abort ();
886 if (fill_value != subword)
887 emit_move_insn (subword, fill_value);
890 insns = get_insns ();
891 end_sequence ();
893 emit_no_conflict_block (insns, to, from, NULL_RTX,
894 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
895 return;
898 /* Truncating multi-word to a word or less. */
899 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
900 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
902 if (!((GET_CODE (from) == MEM
903 && ! MEM_VOLATILE_P (from)
904 && direct_load[(int) to_mode]
905 && ! mode_dependent_address_p (XEXP (from, 0)))
906 || GET_CODE (from) == REG
907 || GET_CODE (from) == SUBREG))
908 from = force_reg (from_mode, from);
909 convert_move (to, gen_lowpart (word_mode, from), 0);
910 return;
913 /* Handle pointer conversion. */ /* SPEE 900220. */
914 if (to_mode == PQImode)
916 if (from_mode != QImode)
917 from = convert_to_mode (QImode, from, unsignedp);
919 #ifdef HAVE_truncqipqi2
920 if (HAVE_truncqipqi2)
922 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
923 return;
925 #endif /* HAVE_truncqipqi2 */
926 abort ();
929 if (from_mode == PQImode)
931 if (to_mode != QImode)
933 from = convert_to_mode (QImode, from, unsignedp);
934 from_mode = QImode;
936 else
938 #ifdef HAVE_extendpqiqi2
939 if (HAVE_extendpqiqi2)
941 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
942 return;
944 #endif /* HAVE_extendpqiqi2 */
945 abort ();
949 if (to_mode == PSImode)
951 if (from_mode != SImode)
952 from = convert_to_mode (SImode, from, unsignedp);
954 #ifdef HAVE_truncsipsi2
955 if (HAVE_truncsipsi2)
957 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
958 return;
960 #endif /* HAVE_truncsipsi2 */
961 abort ();
964 if (from_mode == PSImode)
966 if (to_mode != SImode)
968 from = convert_to_mode (SImode, from, unsignedp);
969 from_mode = SImode;
971 else
973 #ifdef HAVE_extendpsisi2
974 if (! unsignedp && HAVE_extendpsisi2)
976 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
977 return;
979 #endif /* HAVE_extendpsisi2 */
980 #ifdef HAVE_zero_extendpsisi2
981 if (unsignedp && HAVE_zero_extendpsisi2)
983 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
984 return;
986 #endif /* HAVE_zero_extendpsisi2 */
987 abort ();
991 if (to_mode == PDImode)
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1000 return;
1002 #endif /* HAVE_truncdipdi2 */
1003 abort ();
1006 if (from_mode == PDImode)
1008 if (to_mode != DImode)
1010 from = convert_to_mode (DImode, from, unsignedp);
1011 from_mode = DImode;
1013 else
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1019 return;
1021 #endif /* HAVE_extendpdidi2 */
1022 abort ();
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1045 return;
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1055 if (flag_force_mem)
1056 from = force_not_mem (from);
1058 emit_unop_insn (code, to, from, equiv_code);
1059 return;
1061 else
1063 enum machine_mode intermediate;
1064 rtx tmp;
1065 tree shift_amount;
1067 /* Search for a mode to convert via. */
1068 for (intermediate = from_mode; intermediate != VOIDmode;
1069 intermediate = GET_MODE_WIDER_MODE (intermediate))
1070 if (((can_extend_p (to_mode, intermediate, unsignedp)
1071 != CODE_FOR_nothing)
1072 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1073 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1074 GET_MODE_BITSIZE (intermediate))))
1075 && (can_extend_p (intermediate, from_mode, unsignedp)
1076 != CODE_FOR_nothing))
1078 convert_move (to, convert_to_mode (intermediate, from,
1079 unsignedp), unsignedp);
1080 return;
1083 /* No suitable intermediate mode.
1084 Generate what we need with shifts. */
1085 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1086 - GET_MODE_BITSIZE (from_mode), 0);
1087 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1088 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1089 to, unsignedp);
1090 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1091 to, unsignedp);
1092 if (tmp != to)
1093 emit_move_insn (to, tmp);
1094 return;
1098 /* Support special truncate insns for certain modes. */
1100 if (from_mode == DImode && to_mode == SImode)
1102 #ifdef HAVE_truncdisi2
1103 if (HAVE_truncdisi2)
1105 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1106 return;
1108 #endif
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1110 return;
1113 if (from_mode == DImode && to_mode == HImode)
1115 #ifdef HAVE_truncdihi2
1116 if (HAVE_truncdihi2)
1118 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1119 return;
1121 #endif
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 return;
1126 if (from_mode == DImode && to_mode == QImode)
1128 #ifdef HAVE_truncdiqi2
1129 if (HAVE_truncdiqi2)
1131 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1132 return;
1134 #endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1139 if (from_mode == SImode && to_mode == HImode)
1141 #ifdef HAVE_truncsihi2
1142 if (HAVE_truncsihi2)
1144 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1145 return;
1147 #endif
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 return;
1152 if (from_mode == SImode && to_mode == QImode)
1154 #ifdef HAVE_truncsiqi2
1155 if (HAVE_truncsiqi2)
1157 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1158 return;
1160 #endif
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 return;
1165 if (from_mode == HImode && to_mode == QImode)
1167 #ifdef HAVE_trunchiqi2
1168 if (HAVE_trunchiqi2)
1170 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1171 return;
1173 #endif
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 return;
1178 if (from_mode == TImode && to_mode == DImode)
1180 #ifdef HAVE_trunctidi2
1181 if (HAVE_trunctidi2)
1183 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1184 return;
1186 #endif
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 return;
1191 if (from_mode == TImode && to_mode == SImode)
1193 #ifdef HAVE_trunctisi2
1194 if (HAVE_trunctisi2)
1196 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1197 return;
1199 #endif
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 return;
1204 if (from_mode == TImode && to_mode == HImode)
1206 #ifdef HAVE_trunctihi2
1207 if (HAVE_trunctihi2)
1209 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1210 return;
1212 #endif
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1214 return;
1217 if (from_mode == TImode && to_mode == QImode)
1219 #ifdef HAVE_trunctiqi2
1220 if (HAVE_trunctiqi2)
1222 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1223 return;
1225 #endif
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1227 return;
1230 /* Handle truncation of volatile memrefs, and so on;
1231 the things that couldn't be truncated directly,
1232 and for which there was no special instruction. */
1233 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1235 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1236 emit_move_insn (to, temp);
1237 return;
1240 /* Mode combination is not recognized. */
1241 abort ();
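/* Illustrative sketch, not part of the original file: widening an
   SImode value into a fresh DImode register with convert_move.
   UNSIGNEDP selects zero- versus sign-extension; SRC is a
   hypothetical SImode rtx.  */
#if 0
static rtx
example_widen_to_dimode (src, unsignedp)
     rtx src;
     int unsignedp;
{
  rtx wide = gen_reg_rtx (DImode);

  convert_move (wide, src, unsignedp);
  return wide;
}
#endif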
1244 /* Return an rtx for a value that would result
1245 from converting X to mode MODE.
1246 Both X and MODE may be floating, or both integer.
1247 UNSIGNEDP is nonzero if X is an unsigned value.
1248 This can be done by referring to a part of X in place
1249 or by copying to a new temporary with conversion.
1251 This function *must not* call protect_from_queue
1252 except when putting X into an insn (in which case convert_move does it). */
1254 rtx
1255 convert_to_mode (mode, x, unsignedp)
1256 enum machine_mode mode;
1257 rtx x;
1258 int unsignedp;
1260 return convert_modes (mode, VOIDmode, x, unsignedp);
1263 /* Return an rtx for a value that would result
1264 from converting X from mode OLDMODE to mode MODE.
1265 Both modes may be floating, or both integer.
1266 UNSIGNEDP is nonzero if X is an unsigned value.
1268 This can be done by referring to a part of X in place
1269 or by copying to a new temporary with conversion.
1271 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1273 This function *must not* call protect_from_queue
1274 except when putting X into an insn (in which case convert_move does it). */
1276 rtx
1277 convert_modes (mode, oldmode, x, unsignedp)
1278 enum machine_mode mode, oldmode;
1279 rtx x;
1280 int unsignedp;
1282 rtx temp;
1284 /* If FROM is a SUBREG that indicates that we have already done at least
1285 the required extension, strip it. */
1287 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1288 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1289 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1290 x = gen_lowpart (mode, x);
1292 if (GET_MODE (x) != VOIDmode)
1293 oldmode = GET_MODE (x);
1295 if (mode == oldmode)
1296 return x;
1298 /* There is one case that we must handle specially: If we are converting
1299 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1300 we are to interpret the constant as unsigned, gen_lowpart will do
1301 the wrong if the constant appears negative. What we want to do is
1302 make the high-order word of the constant zero, not all ones. */
1304 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1305 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1306 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1308 HOST_WIDE_INT val = INTVAL (x);
1310 if (oldmode != VOIDmode
1311 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1313 int width = GET_MODE_BITSIZE (oldmode);
1315 /* We need to zero extend VAL. */
1316 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1319 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1322 /* We can do this with a gen_lowpart if both desired and current modes
1323 are integer, and this is either a constant integer, a register, or a
1324 non-volatile MEM. Except for the constant case where MODE is no
1325 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1327 if ((GET_CODE (x) == CONST_INT
1328 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1329 || (GET_MODE_CLASS (mode) == MODE_INT
1330 && GET_MODE_CLASS (oldmode) == MODE_INT
1331 && (GET_CODE (x) == CONST_DOUBLE
1332 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1333 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1334 && direct_load[(int) mode])
1335 || (GET_CODE (x) == REG
1336 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1337 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1339 /* ?? If we don't know OLDMODE, we have to assume here that
1340 X does not need sign- or zero-extension. This may not be
1341 the case, but it's the best we can do. */
1342 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1343 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1345 HOST_WIDE_INT val = INTVAL (x);
1346 int width = GET_MODE_BITSIZE (oldmode);
1348 /* We must sign or zero-extend in this case. Start by
1349 zero-extending, then sign extend if we need to. */
1350 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1351 if (! unsignedp
1352 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1353 val |= (HOST_WIDE_INT) (-1) << width;
1355 return GEN_INT (trunc_int_for_mode (val, mode));
1358 return gen_lowpart (mode, x);
1361 temp = gen_reg_rtx (mode);
1362 convert_move (temp, x, unsignedp);
1363 return temp;
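/* Illustrative sketch, not part of the original file: converting a
   constant emits no insns; convert_modes folds it directly.  Here the
   QImode bit pattern of -1, interpreted as unsigned, comes back as
   the SImode constant 255.  */
#if 0
static rtx
example_convert_constant ()
{
  return convert_modes (SImode, QImode, GEN_INT (-1), 1);
}
#endif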
1366 /* This macro is used to determine what the largest unit size that
1367 move_by_pieces can use is. */
1369 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1370 move efficiently, as opposed to MOVE_MAX which is the maximum
1371 number of bytes we can move with a single instruction. */
1373 #ifndef MOVE_MAX_PIECES
1374 #define MOVE_MAX_PIECES MOVE_MAX
1375 #endif
1377 /* Generate several move instructions to copy LEN bytes from block FROM to
1378 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1379 and TO through protect_from_queue before calling.
1381 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1382 used to push FROM to the stack.
1384 ALIGN is maximum alignment we can assume. */
1386 void
1387 move_by_pieces (to, from, len, align)
1388 rtx to, from;
1389 unsigned HOST_WIDE_INT len;
1390 unsigned int align;
1392 struct move_by_pieces data;
1393 rtx to_addr, from_addr = XEXP (from, 0);
1394 unsigned int max_size = MOVE_MAX_PIECES + 1;
1395 enum machine_mode mode = VOIDmode, tmode;
1396 enum insn_code icode;
1398 data.offset = 0;
1399 data.from_addr = from_addr;
1400 if (to)
1402 to_addr = XEXP (to, 0);
1403 data.to = to;
1404 data.autinc_to
1405 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1406 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1407 data.reverse
1408 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1410 else
1412 to_addr = NULL_RTX;
1413 data.to = NULL_RTX;
1414 data.autinc_to = 1;
1415 #ifdef STACK_GROWS_DOWNWARD
1416 data.reverse = 1;
1417 #else
1418 data.reverse = 0;
1419 #endif
1421 data.to_addr = to_addr;
1422 data.from = from;
1423 data.autinc_from
1424 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1425 || GET_CODE (from_addr) == POST_INC
1426 || GET_CODE (from_addr) == POST_DEC);
1428 data.explicit_inc_from = 0;
1429 data.explicit_inc_to = 0;
1430 if (data.reverse) data.offset = len;
1431 data.len = len;
1433 /* If copying requires more than two move insns,
1434 copy addresses to registers (to make displacements shorter)
1435 and use post-increment if available. */
1436 if (!(data.autinc_from && data.autinc_to)
1437 && move_by_pieces_ninsns (len, align) > 2)
1439 /* Find the mode of the largest move... */
1440 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1441 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1442 if (GET_MODE_SIZE (tmode) < max_size)
1443 mode = tmode;
1445 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1447 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1448 data.autinc_from = 1;
1449 data.explicit_inc_from = -1;
1451 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1453 data.from_addr = copy_addr_to_reg (from_addr);
1454 data.autinc_from = 1;
1455 data.explicit_inc_from = 1;
1457 if (!data.autinc_from && CONSTANT_P (from_addr))
1458 data.from_addr = copy_addr_to_reg (from_addr);
1459 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1461 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1462 data.autinc_to = 1;
1463 data.explicit_inc_to = -1;
1465 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1467 data.to_addr = copy_addr_to_reg (to_addr);
1468 data.autinc_to = 1;
1469 data.explicit_inc_to = 1;
1471 if (!data.autinc_to && CONSTANT_P (to_addr))
1472 data.to_addr = copy_addr_to_reg (to_addr);
1475 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1476 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1477 align = MOVE_MAX * BITS_PER_UNIT;
1479 /* First move what we can in the largest integer mode, then go to
1480 successively smaller modes. */
1482 while (max_size > 1)
1484 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1485 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1486 if (GET_MODE_SIZE (tmode) < max_size)
1487 mode = tmode;
1489 if (mode == VOIDmode)
1490 break;
1492 icode = mov_optab->handlers[(int) mode].insn_code;
1493 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1494 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1496 max_size = GET_MODE_SIZE (mode);
1499 /* The code above should have handled everything. */
1500 if (data.len > 0)
1501 abort ();
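/* Illustrative sketch, not part of the original file: the calling
   convention described in the comment above move_by_pieces.  Both
   BLKmode MEMs go through protect_from_queue first; the length (8
   bytes) and the alignment (32 bits) are compile-time constants.  */
#if 0
static void
example_small_fixed_copy (to, from)
     rtx to, from;
{
  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);
  move_by_pieces (to, from, 8, 32);
}
#endif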
1504 /* Return number of insns required to move L bytes by pieces.
1505 ALIGN (in bits) is maximum alignment we can assume. */
1507 static unsigned HOST_WIDE_INT
1508 move_by_pieces_ninsns (l, align)
1509 unsigned HOST_WIDE_INT l;
1510 unsigned int align;
1512 unsigned HOST_WIDE_INT n_insns = 0;
1513 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1515 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1516 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1517 align = MOVE_MAX * BITS_PER_UNIT;
1519 while (max_size > 1)
1521 enum machine_mode mode = VOIDmode, tmode;
1522 enum insn_code icode;
1524 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1525 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1526 if (GET_MODE_SIZE (tmode) < max_size)
1527 mode = tmode;
1529 if (mode == VOIDmode)
1530 break;
1532 icode = mov_optab->handlers[(int) mode].insn_code;
1533 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1534 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1536 max_size = GET_MODE_SIZE (mode);
1539 if (l)
1540 abort ();
1541 return n_insns;
1544 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1545 with move instructions for mode MODE. GENFUN is the gen_... function
1546 to make a move insn for that mode. DATA has all the other info. */
1548 static void
1549 move_by_pieces_1 (genfun, mode, data)
1550 rtx (*genfun) PARAMS ((rtx, ...));
1551 enum machine_mode mode;
1552 struct move_by_pieces *data;
1554 unsigned int size = GET_MODE_SIZE (mode);
1555 rtx to1 = NULL_RTX, from1;
1557 while (data->len >= size)
1559 if (data->reverse)
1560 data->offset -= size;
1562 if (data->to)
1564 if (data->autinc_to)
1565 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1566 data->offset);
1567 else
1568 to1 = adjust_address (data->to, mode, data->offset);
1571 if (data->autinc_from)
1572 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1573 data->offset);
1574 else
1575 from1 = adjust_address (data->from, mode, data->offset);
1577 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1578 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1579 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1580 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1582 if (data->to)
1583 emit_insn ((*genfun) (to1, from1));
1584 else
1586 #ifdef PUSH_ROUNDING
1587 emit_single_push_insn (mode, from1, NULL);
1588 #else
1589 abort ();
1590 #endif
1593 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1594 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1595 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1596 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1598 if (! data->reverse)
1599 data->offset += size;
1601 data->len -= size;
1605 /* Emit code to move a block Y to a block X.
1606 This may be done with string-move instructions,
1607 with multiple scalar move instructions, or with a library call.
1609 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1610 with mode BLKmode.
1611 SIZE is an rtx that says how long they are.
1612 ALIGN is the maximum alignment we can assume they have.
1614 Return the address of the new block, if memcpy is called and returns it,
1615 0 otherwise. */
1617 rtx
1618 emit_block_move (x, y, size)
1619 rtx x, y;
1620 rtx size;
1622 rtx retval = 0;
1623 #ifdef TARGET_MEM_FUNCTIONS
1624 static tree fn;
1625 tree call_expr, arg_list;
1626 #endif
1627 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1629 if (GET_MODE (x) != BLKmode)
1630 abort ();
1632 if (GET_MODE (y) != BLKmode)
1633 abort ();
1635 x = protect_from_queue (x, 1);
1636 y = protect_from_queue (y, 0);
1637 size = protect_from_queue (size, 0);
1639 if (GET_CODE (x) != MEM)
1640 abort ();
1641 if (GET_CODE (y) != MEM)
1642 abort ();
1643 if (size == 0)
1644 abort ();
1646 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1647 move_by_pieces (x, y, INTVAL (size), align);
1648 else
1650 /* Try the most limited insn first, because there's no point
1651 including more than one in the machine description unless
1652 the more limited one has some advantage. */
1654 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1655 enum machine_mode mode;
1657 /* Since this is a move insn, we don't care about volatility. */
1658 volatile_ok = 1;
1660 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1661 mode = GET_MODE_WIDER_MODE (mode))
1663 enum insn_code code = movstr_optab[(int) mode];
1664 insn_operand_predicate_fn pred;
1666 if (code != CODE_FOR_nothing
1667 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1668 here because if SIZE is less than the mode mask, as it is
1669 returned by the macro, it will definitely be less than the
1670 actual mode mask. */
1671 && ((GET_CODE (size) == CONST_INT
1672 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1673 <= (GET_MODE_MASK (mode) >> 1)))
1674 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1675 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1676 || (*pred) (x, BLKmode))
1677 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1678 || (*pred) (y, BLKmode))
1679 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1680 || (*pred) (opalign, VOIDmode)))
1682 rtx op2;
1683 rtx last = get_last_insn ();
1684 rtx pat;
1686 op2 = convert_to_mode (mode, size, 1);
1687 pred = insn_data[(int) code].operand[2].predicate;
1688 if (pred != 0 && ! (*pred) (op2, mode))
1689 op2 = copy_to_mode_reg (mode, op2);
1691 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1692 if (pat)
1694 emit_insn (pat);
1695 volatile_ok = 0;
1696 return 0;
1698 else
1699 delete_insns_since (last);
1703 volatile_ok = 0;
1705 /* X, Y, or SIZE may have been passed through protect_from_queue.
1707 It is unsafe to save the value generated by protect_from_queue
1708 and reuse it later. Consider what happens if emit_queue is
1709 called before the return value from protect_from_queue is used.
1711 Expansion of the CALL_EXPR below will call emit_queue before
1712 we are finished emitting RTL for argument setup. So if we are
1713 not careful we could get the wrong value for an argument.
1715 To avoid this problem we go ahead and emit code to copy X, Y &
1716 SIZE into new pseudos. We can then place those new pseudos
1717 into an RTL_EXPR and use them later, even after a call to
1718 emit_queue.
1720 Note this is not strictly needed for library calls since they
1721 do not call emit_queue before loading their arguments. However,
1722 we may need to have library calls call emit_queue in the future
1723 since failing to do so could cause problems for targets which
1724 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1725 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1726 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1728 #ifdef TARGET_MEM_FUNCTIONS
1729 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1730 #else
1731 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1732 TREE_UNSIGNED (integer_type_node));
1733 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1734 #endif
1736 #ifdef TARGET_MEM_FUNCTIONS
1737 /* It is incorrect to use the libcall calling conventions to call
1738 memcpy in this context.
1740 This could be a user call to memcpy and the user may wish to
1741 examine the return value from memcpy.
1743 For targets where libcalls and normal calls have different conventions
1744 for returning pointers, we could end up generating incorrect code.
1746 So instead of using a libcall sequence we build up a suitable
1747 CALL_EXPR and expand the call in the normal fashion. */
1748 if (fn == NULL_TREE)
1750 tree fntype;
1752 /* This was copied from except.c, I don't know if all this is
1753 necessary in this context or not. */
1754 fn = get_identifier ("memcpy");
1755 fntype = build_pointer_type (void_type_node);
1756 fntype = build_function_type (fntype, NULL_TREE);
1757 fn = build_decl (FUNCTION_DECL, fn, fntype);
1758 ggc_add_tree_root (&fn, 1);
1759 DECL_EXTERNAL (fn) = 1;
1760 TREE_PUBLIC (fn) = 1;
1761 DECL_ARTIFICIAL (fn) = 1;
1762 TREE_NOTHROW (fn) = 1;
1763 make_decl_rtl (fn, NULL);
1764 assemble_external (fn);
1767 /* We need to make an argument list for the function call.
1769 memcpy has three arguments, the first two are void * addresses and
1770 the last is a size_t byte count for the copy. */
1771 arg_list
1772 = build_tree_list (NULL_TREE,
1773 make_tree (build_pointer_type (void_type_node), x));
1774 TREE_CHAIN (arg_list)
1775 = build_tree_list (NULL_TREE,
1776 make_tree (build_pointer_type (void_type_node), y));
1777 TREE_CHAIN (TREE_CHAIN (arg_list))
1778 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1779 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1781 /* Now we have to build up the CALL_EXPR itself. */
1782 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1783 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1784 call_expr, arg_list, NULL_TREE);
1785 TREE_SIDE_EFFECTS (call_expr) = 1;
1787 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1788 #else
1789 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1790 VOIDmode, 3, y, Pmode, x, Pmode,
1791 convert_to_mode (TYPE_MODE (integer_type_node), size,
1792 TREE_UNSIGNED (integer_type_node)),
1793 TYPE_MODE (integer_type_node));
1794 #endif
1796 /* If we are initializing a readonly value, show the above call
1797 clobbered it. Otherwise, a load from it may erroneously be hoisted
1798 from a loop. */
1799 if (RTX_UNCHANGING_P (x))
1800 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1803 return retval;
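/* Illustrative sketch, not part of the original file: copying SIZE
   bytes between two BLKmode MEMs.  DST and SRC are hypothetical
   memory references whose MEM_ALIGN has already been set up;
   emit_block_move then chooses between a movstr pattern,
   move_by_pieces and a library call.  */
#if 0
static void
example_block_copy (dst, src, size)
     rtx dst, src;
     HOST_WIDE_INT size;
{
  emit_block_move (dst, src, GEN_INT (size));
}
#endif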
1806 /* Copy all or part of a value X into registers starting at REGNO.
1807 The number of registers to be filled is NREGS. */
1809 void
1810 move_block_to_reg (regno, x, nregs, mode)
1811 int regno;
1812 rtx x;
1813 int nregs;
1814 enum machine_mode mode;
1816 int i;
1817 #ifdef HAVE_load_multiple
1818 rtx pat;
1819 rtx last;
1820 #endif
1822 if (nregs == 0)
1823 return;
1825 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1826 x = validize_mem (force_const_mem (mode, x));
1828 /* See if the machine can do this with a load multiple insn. */
1829 #ifdef HAVE_load_multiple
1830 if (HAVE_load_multiple)
1832 last = get_last_insn ();
1833 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1834 GEN_INT (nregs));
1835 if (pat)
1837 emit_insn (pat);
1838 return;
1840 else
1841 delete_insns_since (last);
1843 #endif
1845 for (i = 0; i < nregs; i++)
1846 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1847 operand_subword_force (x, i, mode));
1850 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1851 The number of registers to be filled is NREGS. SIZE indicates the number
1852 of bytes in the object X. */
1854 void
1855 move_block_from_reg (regno, x, nregs, size)
1856 int regno;
1857 rtx x;
1858 int nregs;
1859 int size;
1861 int i;
1862 #ifdef HAVE_store_multiple
1863 rtx pat;
1864 rtx last;
1865 #endif
1866 enum machine_mode mode;
1868 if (nregs == 0)
1869 return;
1871 /* If SIZE is that of a mode no bigger than a word, just use that
1872 mode's store operation. */
1873 if (size <= UNITS_PER_WORD
1874 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1875 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1877 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1878 return;
1881 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1882 to the left before storing to memory. Note that the previous test
1883 doesn't handle all cases (e.g. SIZE == 3). */
1884 if (size < UNITS_PER_WORD
1885 && BYTES_BIG_ENDIAN
1886 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1888 rtx tem = operand_subword (x, 0, 1, BLKmode);
1889 rtx shift;
1891 if (tem == 0)
1892 abort ();
1894 shift = expand_shift (LSHIFT_EXPR, word_mode,
1895 gen_rtx_REG (word_mode, regno),
1896 build_int_2 ((UNITS_PER_WORD - size)
1897 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1898 emit_move_insn (tem, shift);
1899 return;
1902 /* See if the machine can do this with a store multiple insn. */
1903 #ifdef HAVE_store_multiple
1904 if (HAVE_store_multiple)
1906 last = get_last_insn ();
1907 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1908 GEN_INT (nregs));
1909 if (pat)
1911 emit_insn (pat);
1912 return;
1914 else
1915 delete_insns_since (last);
1917 #endif
1919 for (i = 0; i < nregs; i++)
1921 rtx tem = operand_subword (x, i, 1, BLKmode);
1923 if (tem == 0)
1924 abort ();
1926 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1930 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1931 registers represented by a PARALLEL. SSIZE represents the total size of
1932 block SRC in bytes, or -1 if not known. */
1933 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1934 the balance will be in what would be the low-order memory addresses, i.e.
1935 left justified for big endian, right justified for little endian. This
1936 happens to be true for the targets currently using this support. If this
1937 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1938 would be needed. */
1940 void
1941 emit_group_load (dst, orig_src, ssize)
1942 rtx dst, orig_src;
1943 int ssize;
1945 rtx *tmps, src;
1946 int start, i;
1948 if (GET_CODE (dst) != PARALLEL)
1949 abort ();
1951 /* Check for a NULL entry, used to indicate that the parameter goes
1952 both on the stack and in registers. */
1953 if (XEXP (XVECEXP (dst, 0, 0), 0))
1954 start = 0;
1955 else
1956 start = 1;
1958 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1960 /* Process the pieces. */
1961 for (i = start; i < XVECLEN (dst, 0); i++)
1963 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1964 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1965 unsigned int bytelen = GET_MODE_SIZE (mode);
1966 int shift = 0;
1968 /* Handle trailing fragments that run over the size of the struct. */
1969 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1971 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1972 bytelen = ssize - bytepos;
1973 if (bytelen <= 0)
1974 abort ();
1977 /* If we won't be loading directly from memory, protect the real source
1978 from strange tricks we might play; but make sure that the source can
1979 be loaded directly into the destination. */
1980 src = orig_src;
1981 if (GET_CODE (orig_src) != MEM
1982 && (!CONSTANT_P (orig_src)
1983 || (GET_MODE (orig_src) != mode
1984 && GET_MODE (orig_src) != VOIDmode)))
1986 if (GET_MODE (orig_src) == VOIDmode)
1987 src = gen_reg_rtx (mode);
1988 else
1989 src = gen_reg_rtx (GET_MODE (orig_src));
1991 emit_move_insn (src, orig_src);
1994 /* Optimize the access just a bit. */
1995 if (GET_CODE (src) == MEM
1996 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
1997 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1998 && bytelen == GET_MODE_SIZE (mode))
2000 tmps[i] = gen_reg_rtx (mode);
2001 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2003 else if (GET_CODE (src) == CONCAT)
2005 if (bytepos == 0
2006 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2007 tmps[i] = XEXP (src, 0);
2008 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2009 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2010 tmps[i] = XEXP (src, 1);
2011 else if (bytepos == 0)
2013 rtx mem = assign_stack_temp (GET_MODE (src),
2014 GET_MODE_SIZE (GET_MODE (src)), 0);
2015 emit_move_insn (mem, src);
2016 tmps[i] = adjust_address (mem, mode, 0);
2018 else
2019 abort ();
2021 else if (CONSTANT_P (src)
2022 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2023 tmps[i] = src;
2024 else
2025 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2026 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2027 mode, mode, ssize);
2029 if (BYTES_BIG_ENDIAN && shift)
2030 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2031 tmps[i], 0, OPTAB_WIDEN);
2034 emit_queue ();
2036 /* Copy the extracted pieces into the proper (probable) hard regs. */
2037 for (i = start; i < XVECLEN (dst, 0); i++)
2038 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2041 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2042 registers represented by a PARALLEL. SSIZE represents the total size of
2043 block DST, or -1 if not known. */
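/* As an illustration, SRC here has the same shape sketched above for
   emit_group_load: a PARALLEL of (expr_list (reg) (const_int byte-offset))
   elements, with a null first operand in element 0 when part of the value
   also lives on the stack.  */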
2045 void
2046 emit_group_store (orig_dst, src, ssize)
2047 rtx orig_dst, src;
2048 int ssize;
2050 rtx *tmps, dst;
2051 int start, i;
2053 if (GET_CODE (src) != PARALLEL)
2054 abort ();
2056 /* Check for a NULL entry, used to indicate that the parameter goes
2057 both on the stack and in registers. */
2058 if (XEXP (XVECEXP (src, 0, 0), 0))
2059 start = 0;
2060 else
2061 start = 1;
2063 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2065 /* Copy the (probable) hard regs into pseudos. */
2066 for (i = start; i < XVECLEN (src, 0); i++)
2068 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2069 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2070 emit_move_insn (tmps[i], reg);
2072 emit_queue ();
2074 /* If we won't be storing directly into memory, protect the real destination
2075 from strange tricks we might play. */
2076 dst = orig_dst;
2077 if (GET_CODE (dst) == PARALLEL)
2079 rtx temp;
2081 /* We can get a PARALLEL dst if there is a conditional expression in
2082 a return statement. In that case, the dst and src are the same,
2083 so no action is necessary. */
2084 if (rtx_equal_p (dst, src))
2085 return;
2087 /* It is unclear if we can ever reach here, but we may as well handle
2088 it. Allocate a temporary, and split this into a store/load to/from
2089 the temporary. */
2091 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2092 emit_group_store (temp, src, ssize);
2093 emit_group_load (dst, temp, ssize);
2094 return;
2096 else if (GET_CODE (dst) != MEM)
2098 dst = gen_reg_rtx (GET_MODE (orig_dst));
2099 /* Make life a bit easier for combine. */
2100 emit_move_insn (dst, const0_rtx);
2103 /* Process the pieces. */
2104 for (i = start; i < XVECLEN (src, 0); i++)
2106 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2107 enum machine_mode mode = GET_MODE (tmps[i]);
2108 unsigned int bytelen = GET_MODE_SIZE (mode);
2110 /* Handle trailing fragments that run over the size of the struct. */
2111 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2113 if (BYTES_BIG_ENDIAN)
2115 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2116 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2117 tmps[i], 0, OPTAB_WIDEN);
2119 bytelen = ssize - bytepos;
2122 /* Optimize the access just a bit. */
2123 if (GET_CODE (dst) == MEM
2124 && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
2125 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2126 && bytelen == GET_MODE_SIZE (mode))
2127 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2128 else
2129 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2130 mode, tmps[i], ssize);
2133 emit_queue ();
2135 /* Copy from the pseudo into the (probable) hard reg. */
2136 if (GET_CODE (dst) == REG)
2137 emit_move_insn (orig_dst, dst);
2140 /* Generate code to copy a BLKmode object of TYPE out of a
2141 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2142 is null, a stack temporary is created. TGTBLK is returned.
2144 The primary purpose of this routine is to handle functions
2145 that return BLKmode structures in registers. Some machines
2146 (the PA for example) want to return all small structures
2147 in registers regardless of the structure's alignment. */
2150 copy_blkmode_from_reg (tgtblk, srcreg, type)
2151 rtx tgtblk;
2152 rtx srcreg;
2153 tree type;
2155 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2156 rtx src = NULL, dst = NULL;
2157 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2158 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2160 if (tgtblk == 0)
2162 tgtblk = assign_temp (build_qualified_type (type,
2163 (TYPE_QUALS (type)
2164 | TYPE_QUAL_CONST)),
2165 0, 1, 1);
2166 preserve_temp_slots (tgtblk);
2169 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2170 into a new pseudo which is a full word.
2172 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2173 the wrong part of the register gets copied so we fake a type conversion
2174 in place. */
2175 if (GET_MODE (srcreg) != BLKmode
2176 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2178 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2179 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2180 else
2181 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2184 /* Structures whose size is not a multiple of a word are aligned
2185 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2186 machine, this means we must skip the empty high order bytes when
2187 calculating the bit offset. */
2188 if (BYTES_BIG_ENDIAN
2189 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2190 && bytes % UNITS_PER_WORD)
2191 big_endian_correction
2192 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
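  /* A worked example: with BITS_PER_WORD == 64 and a 6-byte structure,
     bytes % UNITS_PER_WORD is 6, so the correction is 64 - 6 * 8 == 16
     bits, i.e. we skip the two unused high-order bytes of the register
     before copying.  */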
2194 /* Copy the structure BITSIZE bits at a time.
2196 We could probably emit more efficient code for machines which do not use
2197 strict alignment, but it doesn't seem worth the effort at the current
2198 time. */
2199 for (bitpos = 0, xbitpos = big_endian_correction;
2200 bitpos < bytes * BITS_PER_UNIT;
2201 bitpos += bitsize, xbitpos += bitsize)
2203 /* We need a new source operand each time xbitpos is on a
2204 word boundary and when xbitpos == big_endian_correction
2205 (the first time through). */
2206 if (xbitpos % BITS_PER_WORD == 0
2207 || xbitpos == big_endian_correction)
2208 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2209 GET_MODE (srcreg));
2211 /* We need a new destination operand each time bitpos is on
2212 a word boundary. */
2213 if (bitpos % BITS_PER_WORD == 0)
2214 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2216 /* Use xbitpos for the source extraction (right justified) and
2217 bitpos for the destination store (left justified). */
2218 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2219 extract_bit_field (src, bitsize,
2220 xbitpos % BITS_PER_WORD, 1,
2221 NULL_RTX, word_mode, word_mode,
2222 BITS_PER_WORD),
2223 BITS_PER_WORD);
2226 return tgtblk;
2229 /* Add a USE expression for REG to the (possibly empty) list pointed
2230 to by CALL_FUSAGE. REG must denote a hard register. */
2232 void
2233 use_reg (call_fusage, reg)
2234 rtx *call_fusage, reg;
2236 if (GET_CODE (reg) != REG
2237 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2238 abort ();
2240 *call_fusage
2241 = gen_rtx_EXPR_LIST (VOIDmode,
2242 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2245 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2246 starting at REGNO. All of these registers must be hard registers. */
2248 void
2249 use_regs (call_fusage, regno, nregs)
2250 rtx *call_fusage;
2251 int regno;
2252 int nregs;
2254 int i;
2256 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2257 abort ();
2259 for (i = 0; i < nregs; i++)
2260 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2263 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2264 PARALLEL REGS. This is for calls that pass values in multiple
2265 non-contiguous locations. The Irix 6 ABI has examples of this. */
2267 void
2268 use_group_regs (call_fusage, regs)
2269 rtx *call_fusage;
2270 rtx regs;
2272 int i;
2274 for (i = 0; i < XVECLEN (regs, 0); i++)
2276 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2278 /* A NULL entry means the parameter goes both on the stack and in
2279 registers. This can also be a MEM for targets that pass values
2280 partially on the stack and partially in registers. */
2281 if (reg != 0 && GET_CODE (reg) == REG)
2282 use_reg (call_fusage, reg);
2288 can_store_by_pieces (len, constfun, constfundata, align)
2289 unsigned HOST_WIDE_INT len;
2290 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2291 PTR constfundata;
2292 unsigned int align;
2294 unsigned HOST_WIDE_INT max_size, l;
2295 HOST_WIDE_INT offset = 0;
2296 enum machine_mode mode, tmode;
2297 enum insn_code icode;
2298 int reverse;
2299 rtx cst;
2301 if (! MOVE_BY_PIECES_P (len, align))
2302 return 0;
2304 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2305 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2306 align = MOVE_MAX * BITS_PER_UNIT;
2308 /* We would first store what we can in the largest integer mode, then go to
2309 successively smaller modes. */
2311 for (reverse = 0;
2312 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2313 reverse++)
2315 l = len;
2316 mode = VOIDmode;
2317 max_size = MOVE_MAX_PIECES + 1;
2318 while (max_size > 1)
2320 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2321 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2322 if (GET_MODE_SIZE (tmode) < max_size)
2323 mode = tmode;
2325 if (mode == VOIDmode)
2326 break;
2328 icode = mov_optab->handlers[(int) mode].insn_code;
2329 if (icode != CODE_FOR_nothing
2330 && align >= GET_MODE_ALIGNMENT (mode))
2332 unsigned int size = GET_MODE_SIZE (mode);
2334 while (l >= size)
2336 if (reverse)
2337 offset -= size;
2339 cst = (*constfun) (constfundata, offset, mode);
2340 if (!LEGITIMATE_CONSTANT_P (cst))
2341 return 0;
2343 if (!reverse)
2344 offset += size;
2346 l -= size;
2350 max_size = GET_MODE_SIZE (mode);
2353 /* The code above should have handled everything. */
2354 if (l != 0)
2355 abort ();
2358 return 1;
2361 /* Generate several move instructions to store LEN bytes generated by
2362 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2363 pointer which will be passed as argument in every CONSTFUN call.
2364 ALIGN is maximum alignment we can assume. */
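/* For a minimal example of a CONSTFUN, see clear_by_pieces_1 below, which
   ignores its arguments and returns const0_rtx so that clear_by_pieces
   stores zeros.  A hypothetical memset-style caller would instead return a
   CONST_INT with its fill byte replicated across the bytes of MODE.  */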
2366 void
2367 store_by_pieces (to, len, constfun, constfundata, align)
2368 rtx to;
2369 unsigned HOST_WIDE_INT len;
2370 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2371 PTR constfundata;
2372 unsigned int align;
2374 struct store_by_pieces data;
2376 if (! MOVE_BY_PIECES_P (len, align))
2377 abort ();
2378 to = protect_from_queue (to, 1);
2379 data.constfun = constfun;
2380 data.constfundata = constfundata;
2381 data.len = len;
2382 data.to = to;
2383 store_by_pieces_1 (&data, align);
2386 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2387 rtx with BLKmode). The caller must pass TO through protect_from_queue
2388 before calling. ALIGN is maximum alignment we can assume. */
2390 static void
2391 clear_by_pieces (to, len, align)
2392 rtx to;
2393 unsigned HOST_WIDE_INT len;
2394 unsigned int align;
2396 struct store_by_pieces data;
2398 data.constfun = clear_by_pieces_1;
2399 data.constfundata = NULL;
2400 data.len = len;
2401 data.to = to;
2402 store_by_pieces_1 (&data, align);
2405 /* Callback routine for clear_by_pieces.
2406 Return const0_rtx unconditionally. */
2408 static rtx
2409 clear_by_pieces_1 (data, offset, mode)
2410 PTR data ATTRIBUTE_UNUSED;
2411 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2412 enum machine_mode mode ATTRIBUTE_UNUSED;
2414 return const0_rtx;
2417 /* Subroutine of clear_by_pieces and store_by_pieces.
2418 Generate several move instructions to store LEN bytes of block TO. (A MEM
2419 rtx with BLKmode). The caller must pass TO through protect_from_queue
2420 before calling. ALIGN is maximum alignment we can assume. */
2422 static void
2423 store_by_pieces_1 (data, align)
2424 struct store_by_pieces *data;
2425 unsigned int align;
2427 rtx to_addr = XEXP (data->to, 0);
2428 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2429 enum machine_mode mode = VOIDmode, tmode;
2430 enum insn_code icode;
2432 data->offset = 0;
2433 data->to_addr = to_addr;
2434 data->autinc_to
2435 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2436 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2438 data->explicit_inc_to = 0;
2439 data->reverse
2440 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2441 if (data->reverse)
2442 data->offset = data->len;
2444 /* If storing requires more than two move insns,
2445 copy addresses to registers (to make displacements shorter)
2446 and use post-increment if available. */
2447 if (!data->autinc_to
2448 && move_by_pieces_ninsns (data->len, align) > 2)
2450 /* Determine the main mode we'll be using. */
2451 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2452 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2453 if (GET_MODE_SIZE (tmode) < max_size)
2454 mode = tmode;
2456 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2458 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2459 data->autinc_to = 1;
2460 data->explicit_inc_to = -1;
2463 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2464 && ! data->autinc_to)
2466 data->to_addr = copy_addr_to_reg (to_addr);
2467 data->autinc_to = 1;
2468 data->explicit_inc_to = 1;
2471 if ( !data->autinc_to && CONSTANT_P (to_addr))
2472 data->to_addr = copy_addr_to_reg (to_addr);
2475 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2476 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2477 align = MOVE_MAX * BITS_PER_UNIT;
2479 /* First store what we can in the largest integer mode, then go to
2480 successively smaller modes. */
2482 while (max_size > 1)
2484 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2485 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2486 if (GET_MODE_SIZE (tmode) < max_size)
2487 mode = tmode;
2489 if (mode == VOIDmode)
2490 break;
2492 icode = mov_optab->handlers[(int) mode].insn_code;
2493 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2494 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2496 max_size = GET_MODE_SIZE (mode);
2499 /* The code above should have handled everything. */
2500 if (data->len != 0)
2501 abort ();
2504 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2505 with move instructions for mode MODE. GENFUN is the gen_... function
2506 to make a move insn for that mode. DATA has all the other info. */
2508 static void
2509 store_by_pieces_2 (genfun, mode, data)
2510 rtx (*genfun) PARAMS ((rtx, ...));
2511 enum machine_mode mode;
2512 struct store_by_pieces *data;
2514 unsigned int size = GET_MODE_SIZE (mode);
2515 rtx to1, cst;
2517 while (data->len >= size)
2519 if (data->reverse)
2520 data->offset -= size;
2522 if (data->autinc_to)
2523 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2524 data->offset);
2525 else
2526 to1 = adjust_address (data->to, mode, data->offset);
2528 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2529 emit_insn (gen_add2_insn (data->to_addr,
2530 GEN_INT (-(HOST_WIDE_INT) size)));
2532 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2533 emit_insn ((*genfun) (to1, cst));
2535 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2536 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2538 if (! data->reverse)
2539 data->offset += size;
2541 data->len -= size;
2545 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2546 its length in bytes. */
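/* A usage sketch (illustrative only): to zero a BLKmode MEM holding an
   aggregate EXP, a caller would typically write

       clear_storage (target, expr_size (exp));

   where expr_size yields the object's size in bytes as an rtx.  */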
2549 clear_storage (object, size)
2550 rtx object;
2551 rtx size;
2553 #ifdef TARGET_MEM_FUNCTIONS
2554 static tree fn;
2555 tree call_expr, arg_list;
2556 #endif
2557 rtx retval = 0;
2558 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2559 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2561 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2562 just move a zero. Otherwise, do this a piece at a time. */
2563 if (GET_MODE (object) != BLKmode
2564 && GET_CODE (size) == CONST_INT
2565 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2566 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2567 else
2569 object = protect_from_queue (object, 1);
2570 size = protect_from_queue (size, 0);
2572 if (GET_CODE (size) == CONST_INT
2573 && MOVE_BY_PIECES_P (INTVAL (size), align))
2574 clear_by_pieces (object, INTVAL (size), align);
2575 else
2577 /* Try the most limited insn first, because there's no point
2578 including more than one in the machine description unless
2579 the more limited one has some advantage. */
2581 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2582 enum machine_mode mode;
2584 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2585 mode = GET_MODE_WIDER_MODE (mode))
2587 enum insn_code code = clrstr_optab[(int) mode];
2588 insn_operand_predicate_fn pred;
2590 if (code != CODE_FOR_nothing
2591 /* We don't need MODE to be narrower than
2592 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2593 the mode mask, as it is returned by the macro, it will
2594 definitely be less than the actual mode mask. */
2595 && ((GET_CODE (size) == CONST_INT
2596 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2597 <= (GET_MODE_MASK (mode) >> 1)))
2598 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2599 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2600 || (*pred) (object, BLKmode))
2601 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2602 || (*pred) (opalign, VOIDmode)))
2604 rtx op1;
2605 rtx last = get_last_insn ();
2606 rtx pat;
2608 op1 = convert_to_mode (mode, size, 1);
2609 pred = insn_data[(int) code].operand[1].predicate;
2610 if (pred != 0 && ! (*pred) (op1, mode))
2611 op1 = copy_to_mode_reg (mode, op1);
2613 pat = GEN_FCN ((int) code) (object, op1, opalign);
2614 if (pat)
2616 emit_insn (pat);
2617 return 0;
2619 else
2620 delete_insns_since (last);
2624 /* OBJECT or SIZE may have been passed through protect_from_queue.
2626 It is unsafe to save the value generated by protect_from_queue
2627 and reuse it later. Consider what happens if emit_queue is
2628 called before the return value from protect_from_queue is used.
2630 Expansion of the CALL_EXPR below will call emit_queue before
2631 we are finished emitting RTL for argument setup. So if we are
2632 not careful we could get the wrong value for an argument.
2634 To avoid this problem we go ahead and emit code to copy OBJECT
2635 and SIZE into new pseudos. We can then place those new pseudos
2636 into an RTL_EXPR and use them later, even after a call to
2637 emit_queue.
2639 Note this is not strictly needed for library calls since they
2640 do not call emit_queue before loading their arguments. However,
2641 we may need to have library calls call emit_queue in the future
2642 since failing to do so could cause problems for targets which
2643 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2644 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2646 #ifdef TARGET_MEM_FUNCTIONS
2647 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2648 #else
2649 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2650 TREE_UNSIGNED (integer_type_node));
2651 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2652 #endif
2654 #ifdef TARGET_MEM_FUNCTIONS
2655 /* It is incorrect to use the libcall calling conventions to call
2656 memset in this context.
2658 This could be a user call to memset and the user may wish to
2659 examine the return value from memset.
2661 For targets where libcalls and normal calls have different
2662 conventions for returning pointers, we could end up generating
2663 incorrect code.
2665 So instead of using a libcall sequence we build up a suitable
2666 CALL_EXPR and expand the call in the normal fashion. */
2667 if (fn == NULL_TREE)
2669 tree fntype;
2671 /* This was copied from except.c; I don't know whether all of it is
2672 necessary in this context or not. */
2673 fn = get_identifier ("memset");
2674 fntype = build_pointer_type (void_type_node);
2675 fntype = build_function_type (fntype, NULL_TREE);
2676 fn = build_decl (FUNCTION_DECL, fn, fntype);
2677 ggc_add_tree_root (&fn, 1);
2678 DECL_EXTERNAL (fn) = 1;
2679 TREE_PUBLIC (fn) = 1;
2680 DECL_ARTIFICIAL (fn) = 1;
2681 TREE_NOTHROW (fn) = 1;
2682 make_decl_rtl (fn, NULL);
2683 assemble_external (fn);
2686 /* We need to make an argument list for the function call.
2688 memset has three arguments: the first is a void * address, the
2689 second an integer with the initialization value, the last is a
2690 size_t count of bytes to set. */
2691 arg_list
2692 = build_tree_list (NULL_TREE,
2693 make_tree (build_pointer_type (void_type_node),
2694 object));
2695 TREE_CHAIN (arg_list)
2696 = build_tree_list (NULL_TREE,
2697 make_tree (integer_type_node, const0_rtx));
2698 TREE_CHAIN (TREE_CHAIN (arg_list))
2699 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2700 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2702 /* Now we have to build up the CALL_EXPR itself. */
2703 call_expr = build1 (ADDR_EXPR,
2704 build_pointer_type (TREE_TYPE (fn)), fn);
2705 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2706 call_expr, arg_list, NULL_TREE);
2707 TREE_SIDE_EFFECTS (call_expr) = 1;
2709 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2710 #else
2711 emit_library_call (bzero_libfunc, LCT_NORMAL,
2712 VOIDmode, 2, object, Pmode, size,
2713 TYPE_MODE (integer_type_node));
2714 #endif
2716 /* If we are initializing a readonly value, show the above call
2717 clobbered it. Otherwise, a load from it may erroneously be
2718 hoisted from a loop. */
2719 if (RTX_UNCHANGING_P (object))
2720 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2724 return retval;
2727 /* Generate code to copy Y into X.
2728 Both Y and X must have the same mode, except that
2729 Y can be a constant with VOIDmode.
2730 This mode cannot be BLKmode; use emit_block_move for that.
2732 Return the last instruction emitted. */
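/* A usage sketch (illustrative only): copying a constant into a fresh
   pseudo of the desired mode,

       rtx reg = gen_reg_rtx (SImode);
       emit_move_insn (reg, GEN_INT (42));

   The CONST_INT has VOIDmode, which the mode check below explicitly
   permits.  */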
2735 emit_move_insn (x, y)
2736 rtx x, y;
2738 enum machine_mode mode = GET_MODE (x);
2739 rtx y_cst = NULL_RTX;
2740 rtx last_insn;
2742 x = protect_from_queue (x, 1);
2743 y = protect_from_queue (y, 0);
2745 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2746 abort ();
2748 /* Never force constant_p_rtx to memory. */
2749 if (GET_CODE (y) == CONSTANT_P_RTX)
2751 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2753 y_cst = y;
2754 y = force_const_mem (mode, y);
2757 /* If X or Y are memory references, verify that their addresses are valid
2758 for the machine. */
2759 if (GET_CODE (x) == MEM
2760 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2761 && ! push_operand (x, GET_MODE (x)))
2762 || (flag_force_addr
2763 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2764 x = validize_mem (x);
2766 if (GET_CODE (y) == MEM
2767 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2768 || (flag_force_addr
2769 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2770 y = validize_mem (y);
2772 if (mode == BLKmode)
2773 abort ();
2775 last_insn = emit_move_insn_1 (x, y);
2777 if (y_cst && GET_CODE (x) == REG)
2778 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2780 return last_insn;
2783 /* Low level part of emit_move_insn.
2784 Called just like emit_move_insn, but assumes X and Y
2785 are basically valid. */
2788 emit_move_insn_1 (x, y)
2789 rtx x, y;
2791 enum machine_mode mode = GET_MODE (x);
2792 enum machine_mode submode;
2793 enum mode_class class = GET_MODE_CLASS (mode);
2795 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2796 abort ();
2798 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2799 return
2800 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2802 /* Expand complex moves by moving real part and imag part, if possible. */
2803 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2804 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2805 * BITS_PER_UNIT),
2806 (class == MODE_COMPLEX_INT
2807 ? MODE_INT : MODE_FLOAT),
2809 && (mov_optab->handlers[(int) submode].insn_code
2810 != CODE_FOR_nothing))
2812 /* Don't split destination if it is a stack push. */
2813 int stack = push_operand (x, GET_MODE (x));
2815 #ifdef PUSH_ROUNDING
2816 /* In case we output to the stack, but the size is smaller than what the
2817 machine can push exactly, we need to use move instructions. */
2818 if (stack
2819 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2820 != GET_MODE_SIZE (submode)))
2822 rtx temp;
2823 HOST_WIDE_INT offset1, offset2;
2825 /* Do not use anti_adjust_stack, since we don't want to update
2826 stack_pointer_delta. */
2827 temp = expand_binop (Pmode,
2828 #ifdef STACK_GROWS_DOWNWARD
2829 sub_optab,
2830 #else
2831 add_optab,
2832 #endif
2833 stack_pointer_rtx,
2834 GEN_INT
2835 (PUSH_ROUNDING
2836 (GET_MODE_SIZE (GET_MODE (x)))),
2837 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2839 if (temp != stack_pointer_rtx)
2840 emit_move_insn (stack_pointer_rtx, temp);
2842 #ifdef STACK_GROWS_DOWNWARD
2843 offset1 = 0;
2844 offset2 = GET_MODE_SIZE (submode);
2845 #else
2846 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2847 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2848 + GET_MODE_SIZE (submode));
2849 #endif
2851 emit_move_insn (change_address (x, submode,
2852 gen_rtx_PLUS (Pmode,
2853 stack_pointer_rtx,
2854 GEN_INT (offset1))),
2855 gen_realpart (submode, y));
2856 emit_move_insn (change_address (x, submode,
2857 gen_rtx_PLUS (Pmode,
2858 stack_pointer_rtx,
2859 GEN_INT (offset2))),
2860 gen_imagpart (submode, y));
2862 else
2863 #endif
2864 /* If this is a stack push, push the highpart first, so it
2865 will be in the argument order.
2867 In that case, change_address is used only to convert
2868 the mode, not to change the address. */
2869 if (stack)
2871 /* Note that the real part always precedes the imag part in memory
2872 regardless of the machine's endianness. */
2873 #ifdef STACK_GROWS_DOWNWARD
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_imagpart (submode, y)));
2877 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2878 (gen_rtx_MEM (submode, XEXP (x, 0)),
2879 gen_realpart (submode, y)));
2880 #else
2881 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2882 (gen_rtx_MEM (submode, XEXP (x, 0)),
2883 gen_realpart (submode, y)));
2884 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2885 (gen_rtx_MEM (submode, XEXP (x, 0)),
2886 gen_imagpart (submode, y)));
2887 #endif
2889 else
2891 rtx realpart_x, realpart_y;
2892 rtx imagpart_x, imagpart_y;
2894 /* If this is a complex value with each part being smaller than a
2895 word, the usual calling sequence will likely pack the pieces into
2896 a single register. Unfortunately, SUBREG of hard registers only
2897 deals in terms of words, so we have a problem converting input
2898 arguments to the CONCAT of two registers that is used elsewhere
2899 for complex values. If this is before reload, we can copy it into
2900 memory and reload. FIXME, we should see about using extract and
2901 insert on integer registers, but complex short and complex char
2902 variables should be rarely used. */
2903 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2904 && (reload_in_progress | reload_completed) == 0)
2906 int packed_dest_p
2907 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2908 int packed_src_p
2909 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2911 if (packed_dest_p || packed_src_p)
2913 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2914 ? MODE_FLOAT : MODE_INT);
2916 enum machine_mode reg_mode
2917 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2919 if (reg_mode != BLKmode)
2921 rtx mem = assign_stack_temp (reg_mode,
2922 GET_MODE_SIZE (mode), 0);
2923 rtx cmem = adjust_address (mem, mode, 0);
2925 cfun->cannot_inline
2926 = N_("function using short complex types cannot be inline");
2928 if (packed_dest_p)
2930 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2932 emit_move_insn_1 (cmem, y);
2933 return emit_move_insn_1 (sreg, mem);
2935 else
2937 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2939 emit_move_insn_1 (mem, sreg);
2940 return emit_move_insn_1 (x, cmem);
2946 realpart_x = gen_realpart (submode, x);
2947 realpart_y = gen_realpart (submode, y);
2948 imagpart_x = gen_imagpart (submode, x);
2949 imagpart_y = gen_imagpart (submode, y);
2951 /* Show the output dies here. This is necessary for SUBREGs
2952 of pseudos since we cannot track their lifetimes correctly;
2953 hard regs shouldn't appear here except as return values.
2954 We never want to emit such a clobber after reload. */
2955 if (x != y
2956 && ! (reload_in_progress || reload_completed)
2957 && (GET_CODE (realpart_x) == SUBREG
2958 || GET_CODE (imagpart_x) == SUBREG))
2959 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2961 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2962 (realpart_x, realpart_y));
2963 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2964 (imagpart_x, imagpart_y));
2967 return get_last_insn ();
2970 /* This will handle any multi-word mode that lacks a move_insn pattern.
2971 However, you will get better code if you define such patterns,
2972 even if they must turn into multiple assembler instructions. */
2973 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2975 rtx last_insn = 0;
2976 rtx seq, inner;
2977 int need_clobber;
2978 int i;
2980 #ifdef PUSH_ROUNDING
2982 /* If X is a push on the stack, do the push now and replace
2983 X with a reference to the stack pointer. */
2984 if (push_operand (x, GET_MODE (x)))
2986 rtx temp;
2987 enum rtx_code code;
2989 /* Do not use anti_adjust_stack, since we don't want to update
2990 stack_pointer_delta. */
2991 temp = expand_binop (Pmode,
2992 #ifdef STACK_GROWS_DOWNWARD
2993 sub_optab,
2994 #else
2995 add_optab,
2996 #endif
2997 stack_pointer_rtx,
2998 GEN_INT
2999 (PUSH_ROUNDING
3000 (GET_MODE_SIZE (GET_MODE (x)))),
3001 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3003 if (temp != stack_pointer_rtx)
3004 emit_move_insn (stack_pointer_rtx, temp);
3006 code = GET_CODE (XEXP (x, 0));
3008 /* Just hope that small offsets off SP are OK. */
3009 if (code == POST_INC)
3010 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3011 GEN_INT (-((HOST_WIDE_INT)
3012 GET_MODE_SIZE (GET_MODE (x)))));
3013 else if (code == POST_DEC)
3014 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3015 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3016 else
3017 temp = stack_pointer_rtx;
3019 x = change_address (x, VOIDmode, temp);
3021 #endif
3023 /* If we are in reload, see if either operand is a MEM whose address
3024 is scheduled for replacement. */
3025 if (reload_in_progress && GET_CODE (x) == MEM
3026 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3027 x = replace_equiv_address_nv (x, inner);
3028 if (reload_in_progress && GET_CODE (y) == MEM
3029 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3030 y = replace_equiv_address_nv (y, inner);
3032 start_sequence ();
3034 need_clobber = 0;
3035 for (i = 0;
3036 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3037 i++)
3039 rtx xpart = operand_subword (x, i, 1, mode);
3040 rtx ypart = operand_subword (y, i, 1, mode);
3042 /* If we can't get a part of Y, put Y into memory if it is a
3043 constant. Otherwise, force it into a register. If we still
3044 can't get a part of Y, abort. */
3045 if (ypart == 0 && CONSTANT_P (y))
3047 y = force_const_mem (mode, y);
3048 ypart = operand_subword (y, i, 1, mode);
3050 else if (ypart == 0)
3051 ypart = operand_subword_force (y, i, mode);
3053 if (xpart == 0 || ypart == 0)
3054 abort ();
3056 need_clobber |= (GET_CODE (xpart) == SUBREG);
3058 last_insn = emit_move_insn (xpart, ypart);
3061 seq = gen_sequence ();
3062 end_sequence ();
3064 /* Show the output dies here. This is necessary for SUBREGs
3065 of pseudos since we cannot track their lifetimes correctly;
3066 hard regs shouldn't appear here except as return values.
3067 We never want to emit such a clobber after reload. */
3068 if (x != y
3069 && ! (reload_in_progress || reload_completed)
3070 && need_clobber != 0)
3071 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3073 emit_insn (seq);
3075 return last_insn;
3077 else
3078 abort ();
3081 /* Pushing data onto the stack. */
3083 /* Push a block of length SIZE (perhaps variable)
3084 and return an rtx to address the beginning of the block.
3085 Note that it is not possible for the value returned to be a QUEUED.
3086 The value may be virtual_outgoing_args_rtx.
3088 EXTRA is the number of bytes of padding to push in addition to SIZE.
3089 BELOW nonzero means this padding comes at low addresses;
3090 otherwise, the padding comes at high addresses. */
3093 push_block (size, extra, below)
3094 rtx size;
3095 int extra, below;
3097 rtx temp;
3099 size = convert_modes (Pmode, ptr_mode, size, 1);
3100 if (CONSTANT_P (size))
3101 anti_adjust_stack (plus_constant (size, extra));
3102 else if (GET_CODE (size) == REG && extra == 0)
3103 anti_adjust_stack (size);
3104 else
3106 temp = copy_to_mode_reg (Pmode, size);
3107 if (extra != 0)
3108 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3109 temp, 0, OPTAB_LIB_WIDEN);
3110 anti_adjust_stack (temp);
3113 #ifndef STACK_GROWS_DOWNWARD
3114 if (0)
3115 #else
3116 if (1)
3117 #endif
3119 temp = virtual_outgoing_args_rtx;
3120 if (extra != 0 && below)
3121 temp = plus_constant (temp, extra);
3123 else
3125 if (GET_CODE (size) == CONST_INT)
3126 temp = plus_constant (virtual_outgoing_args_rtx,
3127 -INTVAL (size) - (below ? 0 : extra));
3128 else if (extra != 0 && !below)
3129 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3130 negate_rtx (Pmode, plus_constant (size, extra)));
3131 else
3132 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3133 negate_rtx (Pmode, size));
3136 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3139 #ifdef PUSH_ROUNDING
3141 /* Emit single push insn. */
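/* Illustrative example: on a hypothetical target whose PUSH_ROUNDING
   rounds every push up to 4 bytes, pushing a QImode value gives
   rounded_size == 4 while GET_MODE_SIZE (QImode) == 1, so the shortcut
   below that uses STACK_PUSH_CODE directly is skipped and the explicit
   PRE_MODIFY address arithmetic is used instead.  */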
3143 static void
3144 emit_single_push_insn (mode, x, type)
3145 rtx x;
3146 enum machine_mode mode;
3147 tree type;
3149 rtx dest_addr;
3150 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3151 rtx dest;
3152 enum insn_code icode;
3153 insn_operand_predicate_fn pred;
3155 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3156 /* If there is a push pattern, use it. Otherwise try the old way of handing
3157 a MEM representing the push operation to the move expander. */
3158 icode = push_optab->handlers[(int) mode].insn_code;
3159 if (icode != CODE_FOR_nothing)
3161 if (((pred = insn_data[(int) icode].operand[0].predicate)
3162 && !((*pred) (x, mode))))
3163 x = force_reg (mode, x);
3164 emit_insn (GEN_FCN (icode) (x));
3165 return;
3167 if (GET_MODE_SIZE (mode) == rounded_size)
3168 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3169 else
3171 #ifdef STACK_GROWS_DOWNWARD
3172 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3173 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3174 #else
3175 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3176 GEN_INT (rounded_size));
3177 #endif
3178 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3181 dest = gen_rtx_MEM (mode, dest_addr);
3183 if (type != 0)
3185 set_mem_attributes (dest, type, 1);
3187 if (flag_optimize_sibling_calls)
3188 /* Function incoming arguments may overlap with sibling call
3189 outgoing arguments and we cannot allow reordering of reads
3190 from function arguments with stores to outgoing arguments
3191 of sibling calls. */
3192 set_mem_alias_set (dest, 0);
3194 emit_move_insn (dest, x);
3196 #endif
3198 /* Generate code to push X onto the stack, assuming it has mode MODE and
3199 type TYPE.
3200 MODE is redundant except when X is a CONST_INT (since they don't
3201 carry mode info).
3202 SIZE is an rtx for the size of data to be copied (in bytes),
3203 needed only if X is BLKmode.
3205 ALIGN (in bits) is maximum alignment we can assume.
3207 If PARTIAL and REG are both nonzero, then copy that many of the first
3208 words of X into registers starting with REG, and push the rest of X.
3209 The amount of space pushed is decreased by PARTIAL words,
3210 rounded *down* to a multiple of PARM_BOUNDARY.
3211 REG must be a hard register in this case.
3212 If REG is zero but PARTIAL is not, take all other actions for an
3213 argument partially in registers, but do not actually load any
3214 registers.
3216 EXTRA is the amount in bytes of extra space to leave next to this arg.
3217 This is ignored if an argument block has already been allocated.
3219 On a machine that lacks real push insns, ARGS_ADDR is the address of
3220 the bottom of the argument block for this call. We use indexing off there
3221 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3222 argument block has not been preallocated.
3224 ARGS_SO_FAR is the size of args previously pushed for this call.
3226 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3227 for arguments passed in registers. If nonzero, it will be the number
3228 of bytes required. */
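/* A worked example (illustrative): on a 32-bit target with PARTIAL == 2,
   the first two words (8 bytes) of X end up in REG and the following hard
   register (loaded at the end of this function), and the stack space
   pushed for X is reduced by those 8 bytes, rounded down to a multiple of
   PARM_BOUNDARY as described above.  */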
3230 void
3231 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3232 args_addr, args_so_far, reg_parm_stack_space,
3233 alignment_pad)
3234 rtx x;
3235 enum machine_mode mode;
3236 tree type;
3237 rtx size;
3238 unsigned int align;
3239 int partial;
3240 rtx reg;
3241 int extra;
3242 rtx args_addr;
3243 rtx args_so_far;
3244 int reg_parm_stack_space;
3245 rtx alignment_pad;
3247 rtx xinner;
3248 enum direction stack_direction
3249 #ifdef STACK_GROWS_DOWNWARD
3250 = downward;
3251 #else
3252 = upward;
3253 #endif
3255 /* Decide where to pad the argument: `downward' for below,
3256 `upward' for above, or `none' for don't pad it.
3257 Default is below for small data on big-endian machines; else above. */
3258 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3260 /* Invert direction if stack is post-decrement.
3261 FIXME: why? */
3262 if (STACK_PUSH_CODE == POST_DEC)
3263 if (where_pad != none)
3264 where_pad = (where_pad == downward ? upward : downward);
3266 xinner = x = protect_from_queue (x, 0);
3268 if (mode == BLKmode)
3270 /* Copy a block into the stack, entirely or partially. */
3272 rtx temp;
3273 int used = partial * UNITS_PER_WORD;
3274 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3275 int skip;
3277 if (size == 0)
3278 abort ();
3280 used -= offset;
3282 /* USED is now the # of bytes we need not copy to the stack
3283 because registers will take care of them. */
3285 if (partial != 0)
3286 xinner = adjust_address (xinner, BLKmode, used);
3288 /* If the partial register-part of the arg counts in its stack size,
3289 skip the part of stack space corresponding to the registers.
3290 Otherwise, start copying to the beginning of the stack space,
3291 by setting SKIP to 0. */
3292 skip = (reg_parm_stack_space == 0) ? 0 : used;
3294 #ifdef PUSH_ROUNDING
3295 /* Do it with several push insns if that doesn't take lots of insns
3296 and if there is no difficulty with push insns that skip bytes
3297 on the stack for alignment purposes. */
3298 if (args_addr == 0
3299 && PUSH_ARGS
3300 && GET_CODE (size) == CONST_INT
3301 && skip == 0
3302 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3303 /* Here we avoid the case of a structure whose weak alignment
3304 forces many pushes of a small amount of data,
3305 and such small pushes do rounding that causes trouble. */
3306 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3307 || align >= BIGGEST_ALIGNMENT
3308 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3309 == (align / BITS_PER_UNIT)))
3310 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3312 /* Push padding now if padding above and stack grows down,
3313 or if padding below and stack grows up.
3314 But if space already allocated, this has already been done. */
3315 if (extra && args_addr == 0
3316 && where_pad != none && where_pad != stack_direction)
3317 anti_adjust_stack (GEN_INT (extra));
3319 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3321 else
3322 #endif /* PUSH_ROUNDING */
3324 rtx target;
3326 /* Otherwise make space on the stack and copy the data
3327 to the address of that space. */
3329 /* Deduct words put into registers from the size we must copy. */
3330 if (partial != 0)
3332 if (GET_CODE (size) == CONST_INT)
3333 size = GEN_INT (INTVAL (size) - used);
3334 else
3335 size = expand_binop (GET_MODE (size), sub_optab, size,
3336 GEN_INT (used), NULL_RTX, 0,
3337 OPTAB_LIB_WIDEN);
3340 /* Get the address of the stack space.
3341 In this case, we do not deal with EXTRA separately.
3342 A single stack adjust will do. */
3343 if (! args_addr)
3345 temp = push_block (size, extra, where_pad == downward);
3346 extra = 0;
3348 else if (GET_CODE (args_so_far) == CONST_INT)
3349 temp = memory_address (BLKmode,
3350 plus_constant (args_addr,
3351 skip + INTVAL (args_so_far)));
3352 else
3353 temp = memory_address (BLKmode,
3354 plus_constant (gen_rtx_PLUS (Pmode,
3355 args_addr,
3356 args_so_far),
3357 skip));
3358 target = gen_rtx_MEM (BLKmode, temp);
3360 if (type != 0)
3362 set_mem_attributes (target, type, 1);
3363 /* Function incoming arguments may overlap with sibling call
3364 outgoing arguments and we cannot allow reordering of reads
3365 from function arguments with stores to outgoing arguments
3366 of sibling calls. */
3367 set_mem_alias_set (target, 0);
3369 else
3370 set_mem_align (target, align);
3372 /* TEMP is the address of the block. Copy the data there. */
3373 if (GET_CODE (size) == CONST_INT
3374 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3376 move_by_pieces (target, xinner, INTVAL (size), align);
3377 goto ret;
3379 else
3381 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3382 enum machine_mode mode;
3384 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3385 mode != VOIDmode;
3386 mode = GET_MODE_WIDER_MODE (mode))
3388 enum insn_code code = movstr_optab[(int) mode];
3389 insn_operand_predicate_fn pred;
3391 if (code != CODE_FOR_nothing
3392 && ((GET_CODE (size) == CONST_INT
3393 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3394 <= (GET_MODE_MASK (mode) >> 1)))
3395 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3396 && (!(pred = insn_data[(int) code].operand[0].predicate)
3397 || ((*pred) (target, BLKmode)))
3398 && (!(pred = insn_data[(int) code].operand[1].predicate)
3399 || ((*pred) (xinner, BLKmode)))
3400 && (!(pred = insn_data[(int) code].operand[3].predicate)
3401 || ((*pred) (opalign, VOIDmode))))
3403 rtx op2 = convert_to_mode (mode, size, 1);
3404 rtx last = get_last_insn ();
3405 rtx pat;
3407 pred = insn_data[(int) code].operand[2].predicate;
3408 if (pred != 0 && ! (*pred) (op2, mode))
3409 op2 = copy_to_mode_reg (mode, op2);
3411 pat = GEN_FCN ((int) code) (target, xinner,
3412 op2, opalign);
3413 if (pat)
3415 emit_insn (pat);
3416 goto ret;
3418 else
3419 delete_insns_since (last);
3424 if (!ACCUMULATE_OUTGOING_ARGS)
3426 /* If the source is referenced relative to the stack pointer,
3427 copy it to another register to stabilize it. We do not need
3428 to do this if we know that we won't be changing sp. */
3430 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3431 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3432 temp = copy_to_reg (temp);
3435 /* Make inhibit_defer_pop nonzero around the library call
3436 to force it to pop the bcopy-arguments right away. */
3437 NO_DEFER_POP;
3438 #ifdef TARGET_MEM_FUNCTIONS
3439 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3440 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3441 convert_to_mode (TYPE_MODE (sizetype),
3442 size, TREE_UNSIGNED (sizetype)),
3443 TYPE_MODE (sizetype));
3444 #else
3445 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3446 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3447 convert_to_mode (TYPE_MODE (integer_type_node),
3448 size,
3449 TREE_UNSIGNED (integer_type_node)),
3450 TYPE_MODE (integer_type_node));
3451 #endif
3452 OK_DEFER_POP;
3455 else if (partial > 0)
3457 /* Scalar partly in registers. */
3459 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3460 int i;
3461 int not_stack;
3462 /* # words of start of argument
3463 that we must make space for but need not store. */
3464 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3465 int args_offset = INTVAL (args_so_far);
3466 int skip;
3468 /* Push padding now if padding above and stack grows down,
3469 or if padding below and stack grows up.
3470 But if space already allocated, this has already been done. */
3471 if (extra && args_addr == 0
3472 && where_pad != none && where_pad != stack_direction)
3473 anti_adjust_stack (GEN_INT (extra));
3475 /* If we make space by pushing it, we might as well push
3476 the real data. Otherwise, we can leave OFFSET nonzero
3477 and leave the space uninitialized. */
3478 if (args_addr == 0)
3479 offset = 0;
3481 /* Now NOT_STACK gets the number of words that we don't need to
3482 allocate on the stack. */
3483 not_stack = partial - offset;
3485 /* If the partial register-part of the arg counts in its stack size,
3486 skip the part of stack space corresponding to the registers.
3487 Otherwise, start copying to the beginning of the stack space,
3488 by setting SKIP to 0. */
3489 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3491 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3492 x = validize_mem (force_const_mem (mode, x));
3494 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3495 SUBREGs of such registers are not allowed. */
3496 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3497 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3498 x = copy_to_reg (x);
3500 /* Loop over all the words allocated on the stack for this arg. */
3501 /* We can do it by words, because any scalar bigger than a word
3502 has a size a multiple of a word. */
3503 #ifndef PUSH_ARGS_REVERSED
3504 for (i = not_stack; i < size; i++)
3505 #else
3506 for (i = size - 1; i >= not_stack; i--)
3507 #endif
3508 if (i >= not_stack + offset)
3509 emit_push_insn (operand_subword_force (x, i, mode),
3510 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3511 0, args_addr,
3512 GEN_INT (args_offset + ((i - not_stack + skip)
3513 * UNITS_PER_WORD)),
3514 reg_parm_stack_space, alignment_pad);
3516 else
3518 rtx addr;
3519 rtx target = NULL_RTX;
3520 rtx dest;
3522 /* Push padding now if padding above and stack grows down,
3523 or if padding below and stack grows up.
3524 But if space already allocated, this has already been done. */
3525 if (extra && args_addr == 0
3526 && where_pad != none && where_pad != stack_direction)
3527 anti_adjust_stack (GEN_INT (extra));
3529 #ifdef PUSH_ROUNDING
3530 if (args_addr == 0 && PUSH_ARGS)
3531 emit_single_push_insn (mode, x, type);
3532 else
3533 #endif
3535 if (GET_CODE (args_so_far) == CONST_INT)
3536 addr
3537 = memory_address (mode,
3538 plus_constant (args_addr,
3539 INTVAL (args_so_far)));
3540 else
3541 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3542 args_so_far));
3543 target = addr;
3544 dest = gen_rtx_MEM (mode, addr);
3545 if (type != 0)
3547 set_mem_attributes (dest, type, 1);
3548 /* Function incoming arguments may overlap with sibling call
3549 outgoing arguments and we cannot allow reordering of reads
3550 from function arguments with stores to outgoing arguments
3551 of sibling calls. */
3552 set_mem_alias_set (dest, 0);
3555 emit_move_insn (dest, x);
3560 ret:
3561 /* If part should go in registers, copy that part
3562 into the appropriate registers. Do this now, at the end,
3563 since mem-to-mem copies above may do function calls. */
3564 if (partial > 0 && reg != 0)
3566 /* Handle calls that pass values in multiple non-contiguous locations.
3567 The Irix 6 ABI has examples of this. */
3568 if (GET_CODE (reg) == PARALLEL)
3569 emit_group_load (reg, x, -1); /* ??? size? */
3570 else
3571 move_block_to_reg (REGNO (reg), x, partial, mode);
3574 if (extra && args_addr == 0 && where_pad == stack_direction)
3575 anti_adjust_stack (GEN_INT (extra));
3577 if (alignment_pad && args_addr == 0)
3578 anti_adjust_stack (alignment_pad);
3581 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3582 operations. */
3584 static rtx
3585 get_subtarget (x)
3586 rtx x;
3588 return ((x == 0
3589 /* Only registers can be subtargets. */
3590 || GET_CODE (x) != REG
3591 /* If the register is readonly, it can't be set more than once. */
3592 || RTX_UNCHANGING_P (x)
3593 /* Don't use hard regs to avoid extending their life. */
3594 || REGNO (x) < FIRST_PSEUDO_REGISTER
3595 /* Avoid subtargets inside loops,
3596 since they hide some invariant expressions. */
3597 || preserve_subexpressions_p ())
3598 ? 0 : x);
3601 /* Expand an assignment that stores the value of FROM into TO.
3602 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3603 (This may contain a QUEUED rtx;
3604 if the value is constant, this rtx is a constant.)
3605 Otherwise, the returned value is NULL_RTX.
3607 SUGGEST_REG is no longer actually used.
3608 It used to mean, copy the value through a register
3609 and return that register, if that is possible.
3610 We now use WANT_VALUE to decide whether to do this. */
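/* Illustrative example: for an assignment such as `s.f = x' where `s.f' is
   a bit-field, TO is a COMPONENT_REF, so the branch below uses
   get_inner_reference to find the containing object along with
   BITSIZE/BITPOS and then stores through store_field instead of a plain
   move.  */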
3613 expand_assignment (to, from, want_value, suggest_reg)
3614 tree to, from;
3615 int want_value;
3616 int suggest_reg ATTRIBUTE_UNUSED;
3618 rtx to_rtx = 0;
3619 rtx result;
3621 /* Don't crash if the lhs of the assignment was erroneous. */
3623 if (TREE_CODE (to) == ERROR_MARK)
3625 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3626 return want_value ? result : NULL_RTX;
3629 /* Assignment of a structure component needs special treatment
3630 if the structure component's rtx is not simply a MEM.
3631 Assignment of an array element at a constant index, and assignment of
3632 an array element in an unaligned packed structure field, has the same
3633 problem. */
3635 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3636 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3638 enum machine_mode mode1;
3639 HOST_WIDE_INT bitsize, bitpos;
3640 rtx orig_to_rtx;
3641 tree offset;
3642 int unsignedp;
3643 int volatilep = 0;
3644 tree tem;
3646 push_temp_slots ();
3647 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3648 &unsignedp, &volatilep);
3650 /* If we are going to use store_bit_field and extract_bit_field,
3651 make sure to_rtx will be safe for multiple use. */
3653 if (mode1 == VOIDmode && want_value)
3654 tem = stabilize_reference (tem);
3656 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3658 if (offset != 0)
3660 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3662 if (GET_CODE (to_rtx) != MEM)
3663 abort ();
3665 if (GET_MODE (offset_rtx) != ptr_mode)
3666 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3668 #ifdef POINTERS_EXTEND_UNSIGNED
3669 if (GET_MODE (offset_rtx) != Pmode)
3670 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3671 #endif
3673 /* A constant address in TO_RTX can have VOIDmode; we must not try
3674 to call force_reg for that case. Avoid that case. */
3675 if (GET_CODE (to_rtx) == MEM
3676 && GET_MODE (to_rtx) == BLKmode
3677 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3678 && bitsize > 0
3679 && (bitpos % bitsize) == 0
3680 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3681 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3683 rtx temp
3684 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3686 if (GET_CODE (XEXP (temp, 0)) == REG)
3687 to_rtx = temp;
3688 else
3689 to_rtx = (replace_equiv_address
3690 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3691 XEXP (temp, 0))));
3692 bitpos = 0;
3695 to_rtx = offset_address (to_rtx, offset_rtx,
3696 highest_pow2_factor (offset));
3699 if (GET_CODE (to_rtx) == MEM)
3701 tree old_expr = MEM_EXPR (to_rtx);
3703 /* If the field is at offset zero, we could have been given the
3704 DECL_RTX of the parent struct. Don't munge it. */
3705 to_rtx = shallow_copy_rtx (to_rtx);
3707 set_mem_attributes (to_rtx, to, 0);
3709 /* If we changed MEM_EXPR, that means we're now referencing
3710 the COMPONENT_REF, which means that MEM_OFFSET must be
3711 relative to that field. But we've not yet reflected BITPOS
3712 in TO_RTX. This will be done in store_field. Adjust for
3713 that by biasing MEM_OFFSET by -bitpos. */
3714 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3715 && (bitpos / BITS_PER_UNIT) != 0)
3716 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3717 - (bitpos / BITS_PER_UNIT)));
3720 /* Deal with volatile and readonly fields. The former is only done
3721 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3722 if (volatilep && GET_CODE (to_rtx) == MEM)
3724 if (to_rtx == orig_to_rtx)
3725 to_rtx = copy_rtx (to_rtx);
3726 MEM_VOLATILE_P (to_rtx) = 1;
3729 if (TREE_CODE (to) == COMPONENT_REF
3730 && TREE_READONLY (TREE_OPERAND (to, 1)))
3732 if (to_rtx == orig_to_rtx)
3733 to_rtx = copy_rtx (to_rtx);
3734 RTX_UNCHANGING_P (to_rtx) = 1;
3737 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3739 if (to_rtx == orig_to_rtx)
3740 to_rtx = copy_rtx (to_rtx);
3741 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3744 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3745 (want_value
3746 /* Spurious cast for HPUX compiler. */
3747 ? ((enum machine_mode)
3748 TYPE_MODE (TREE_TYPE (to)))
3749 : VOIDmode),
3750 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3752 preserve_temp_slots (result);
3753 free_temp_slots ();
3754 pop_temp_slots ();
3756 /* If the value is meaningful, convert RESULT to the proper mode.
3757 Otherwise, return nothing. */
3758 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3759 TYPE_MODE (TREE_TYPE (from)),
3760 result,
3761 TREE_UNSIGNED (TREE_TYPE (to)))
3762 : NULL_RTX);
3765 /* If the rhs is a function call and its value is not an aggregate,
3766 call the function before we start to compute the lhs.
3767 This is needed for correct code for cases such as
3768 val = setjmp (buf) on machines where reference to val
3769 requires loading up part of an address in a separate insn.
3771 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3772 since it might be a promoted variable where the zero- or sign- extension
3773 needs to be done. Handling this in the normal way is safe because no
3774 computation is done before the call. */
3775 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3776 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3777 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3778 && GET_CODE (DECL_RTL (to)) == REG))
3780 rtx value;
3782 push_temp_slots ();
3783 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3784 if (to_rtx == 0)
3785 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3787 /* Handle calls that return values in multiple non-contiguous locations.
3788 The Irix 6 ABI has examples of this. */
3789 if (GET_CODE (to_rtx) == PARALLEL)
3790 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3791 else if (GET_MODE (to_rtx) == BLKmode)
3792 emit_block_move (to_rtx, value, expr_size (from));
3793 else
3795 #ifdef POINTERS_EXTEND_UNSIGNED
3796 if (POINTER_TYPE_P (TREE_TYPE (to))
3797 && GET_MODE (to_rtx) != GET_MODE (value))
3798 value = convert_memory_address (GET_MODE (to_rtx), value);
3799 #endif
3800 emit_move_insn (to_rtx, value);
3802 preserve_temp_slots (to_rtx);
3803 free_temp_slots ();
3804 pop_temp_slots ();
3805 return want_value ? to_rtx : NULL_RTX;
3808 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3809 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3811 if (to_rtx == 0)
3812 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3814 /* Don't move directly into a return register. */
3815 if (TREE_CODE (to) == RESULT_DECL
3816 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3818 rtx temp;
3820 push_temp_slots ();
3821 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3823 if (GET_CODE (to_rtx) == PARALLEL)
3824 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3825 else
3826 emit_move_insn (to_rtx, temp);
3828 preserve_temp_slots (to_rtx);
3829 free_temp_slots ();
3830 pop_temp_slots ();
3831 return want_value ? to_rtx : NULL_RTX;
3834 /* In case we are returning the contents of an object which overlaps
3835 the place the value is being stored, use a safe function when copying
3836 a value through a pointer into a structure value return block. */
3837 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3838 && current_function_returns_struct
3839 && !current_function_returns_pcc_struct)
3841 rtx from_rtx, size;
3843 push_temp_slots ();
3844 size = expr_size (from);
3845 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3847 #ifdef TARGET_MEM_FUNCTIONS
3848 emit_library_call (memmove_libfunc, LCT_NORMAL,
3849 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3850 XEXP (from_rtx, 0), Pmode,
3851 convert_to_mode (TYPE_MODE (sizetype),
3852 size, TREE_UNSIGNED (sizetype)),
3853 TYPE_MODE (sizetype));
3854 #else
3855 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3856 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3857 XEXP (to_rtx, 0), Pmode,
3858 convert_to_mode (TYPE_MODE (integer_type_node),
3859 size, TREE_UNSIGNED (integer_type_node)),
3860 TYPE_MODE (integer_type_node));
3861 #endif
3863 preserve_temp_slots (to_rtx);
3864 free_temp_slots ();
3865 pop_temp_slots ();
3866 return want_value ? to_rtx : NULL_RTX;
3869 /* Compute FROM and store the value in the rtx we got. */
3871 push_temp_slots ();
3872 result = store_expr (from, to_rtx, want_value);
3873 preserve_temp_slots (result);
3874 free_temp_slots ();
3875 pop_temp_slots ();
3876 return want_value ? result : NULL_RTX;
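/* Illustrative sketch, not part of this file: the memmove/bcopy library call
   above is used precisely because the source and destination regions may
   overlap.  The standalone example below (hypothetical names, plain libc)
   shows the user-level property that call relies on.  */
#if 0
#include <stdio.h>
#include <string.h>

/* Copy N bytes from SRC to DST when the two regions may overlap.
   memcpy would be undefined behavior here; memmove is specified to
   behave as if it copied through a temporary buffer.  */
static void
copy_overlapping (char *dst, const char *src, size_t n)
{
  memmove (dst, src, n);
}

int
main (void)
{
  char buf[16] = "abcdefghij";

  /* Shift "abcdefghij" two places to the right inside the same array.  */
  copy_overlapping (buf + 2, buf, 10);
  buf[12] = '\0';
  printf ("%s\n", buf);   /* prints "ababcdefghij" */
  return 0;
}
#endif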
3879 /* Generate code for computing expression EXP,
3880 and storing the value into TARGET.
3881 TARGET may contain a QUEUED rtx.
3883 If WANT_VALUE is nonzero, return a copy of the value
3884 not in TARGET, so that we can be sure to use the proper
3885 value in a containing expression even if TARGET has something
3886 else stored in it. If possible, we copy the value through a pseudo
3887 and return that pseudo. Or, if the value is constant, we try to
3888 return the constant. In some cases, we return a pseudo
3889 copied *from* TARGET.
3891 If the mode is BLKmode then we may return TARGET itself.
3892 It turns out that in BLKmode it doesn't cause a problem,
3893 because C has no operators that could combine two different
3894 assignments into the same BLKmode object with different values
3895 with no sequence point. Will other languages need this to
3896 be more thorough?
3898 If WANT_VALUE is 0, we return NULL, to make sure
3899 to catch quickly any cases where the caller uses the value
3900 and fails to set WANT_VALUE. */
3902 rtx
3903 store_expr (exp, target, want_value)
3904 tree exp;
3905 rtx target;
3906 int want_value;
3908 rtx temp;
3909 int dont_return_target = 0;
3910 int dont_store_target = 0;
3912 if (TREE_CODE (exp) == COMPOUND_EXPR)
3914 /* Perform first part of compound expression, then assign from second
3915 part. */
3916 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3917 emit_queue ();
3918 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3920 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3922 /* For conditional expression, get safe form of the target. Then
3923 test the condition, doing the appropriate assignment on either
3924 side. This avoids the creation of unnecessary temporaries.
3925 For non-BLKmode, it is more efficient not to do this. */
3927 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3929 emit_queue ();
3930 target = protect_from_queue (target, 1);
3932 do_pending_stack_adjust ();
3933 NO_DEFER_POP;
3934 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3935 start_cleanup_deferral ();
3936 store_expr (TREE_OPERAND (exp, 1), target, 0);
3937 end_cleanup_deferral ();
3938 emit_queue ();
3939 emit_jump_insn (gen_jump (lab2));
3940 emit_barrier ();
3941 emit_label (lab1);
3942 start_cleanup_deferral ();
3943 store_expr (TREE_OPERAND (exp, 2), target, 0);
3944 end_cleanup_deferral ();
3945 emit_queue ();
3946 emit_label (lab2);
3947 OK_DEFER_POP;
3949 return want_value ? target : NULL_RTX;
3951 else if (queued_subexp_p (target))
3952 /* If target contains a postincrement, let's not risk
3953 using it as the place to generate the rhs. */
3955 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3957 /* Expand EXP into a new pseudo. */
3958 temp = gen_reg_rtx (GET_MODE (target));
3959 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3961 else
3962 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3964 /* If target is volatile, ANSI requires accessing the value
3965 *from* the target, if it is accessed. So make that happen.
3966 In no case return the target itself. */
3967 if (! MEM_VOLATILE_P (target) && want_value)
3968 dont_return_target = 1;
3970 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3971 && GET_MODE (target) != BLKmode)
3972 /* If target is in memory and caller wants value in a register instead,
3973 arrange that. Pass TARGET as target for expand_expr so that,
3974 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3975 We know expand_expr will not use the target in that case.
3976 Don't do this if TARGET is volatile because we are supposed
3977 to write it and then read it. */
3979 temp = expand_expr (exp, target, GET_MODE (target), 0);
3980 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3982 /* If TEMP is already in the desired TARGET, only copy it from
3983 memory and don't store it there again. */
3984 if (temp == target
3985 || (rtx_equal_p (temp, target)
3986 && ! side_effects_p (temp) && ! side_effects_p (target)))
3987 dont_store_target = 1;
3988 temp = copy_to_reg (temp);
3990 dont_return_target = 1;
3992 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3993 /* If this is a scalar in a register that is stored in a wider mode
3994 than the declared mode, compute the result into its declared mode
3995 and then convert to the wider mode. Our value is the computed
3996 expression. */
3998 /* If we don't want a value, we can do the conversion inside EXP,
3999 which will often result in some optimizations. Do the conversion
4000 in two steps: first change the signedness, if needed, then
4001 the extend. But don't do this if the type of EXP is a subtype
4002 of something else since then the conversion might involve
4003 more than just converting modes. */
4004 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4005 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4007 if (TREE_UNSIGNED (TREE_TYPE (exp))
4008 != SUBREG_PROMOTED_UNSIGNED_P (target))
4009 exp
4010 = convert
4011 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4012 TREE_TYPE (exp)),
4013 exp);
4015 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4016 SUBREG_PROMOTED_UNSIGNED_P (target)),
4017 exp);
4020 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4022 /* If TEMP is a volatile MEM and we want a result value, make
4023 the access now so it gets done only once. Likewise if
4024 it contains TARGET. */
4025 if (GET_CODE (temp) == MEM && want_value
4026 && (MEM_VOLATILE_P (temp)
4027 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4028 temp = copy_to_reg (temp);
4030 /* If TEMP is a VOIDmode constant, use convert_modes to make
4031 sure that we properly convert it. */
4032 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4034 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4035 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4036 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4037 GET_MODE (target), temp,
4038 SUBREG_PROMOTED_UNSIGNED_P (target));
4041 convert_move (SUBREG_REG (target), temp,
4042 SUBREG_PROMOTED_UNSIGNED_P (target));
4044 /* If we promoted a constant, change the mode back down to match
4045 target. Otherwise, the caller might get confused by a result whose
4046 mode is larger than expected. */
4048 if (want_value && GET_MODE (temp) != GET_MODE (target))
4050 if (GET_MODE (temp) != VOIDmode)
4052 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4053 SUBREG_PROMOTED_VAR_P (temp) = 1;
4054 SUBREG_PROMOTED_UNSIGNED_P (temp)
4055 = SUBREG_PROMOTED_UNSIGNED_P (target);
4057 else
4058 temp = convert_modes (GET_MODE (target),
4059 GET_MODE (SUBREG_REG (target)),
4060 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4063 return want_value ? temp : NULL_RTX;
4065 else
4067 temp = expand_expr (exp, target, GET_MODE (target), 0);
4068 /* Return TARGET if it's a specified hardware register.
4069 If TARGET is a volatile mem ref, either return TARGET
4070 or return a reg copied *from* TARGET; ANSI requires this.
4072 Otherwise, if TEMP is not TARGET, return TEMP
4073 if it is constant (for efficiency),
4074 or if we really want the correct value. */
4075 if (!(target && GET_CODE (target) == REG
4076 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4077 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4078 && ! rtx_equal_p (temp, target)
4079 && (CONSTANT_P (temp) || want_value))
4080 dont_return_target = 1;
4083 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4084 the same as that of TARGET, adjust the constant. This is needed, for
4085 example, in case it is a CONST_DOUBLE and we want only a word-sized
4086 value. */
4087 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4088 && TREE_CODE (exp) != ERROR_MARK
4089 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4090 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4091 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4093 /* If value was not generated in the target, store it there.
4094 Convert the value to TARGET's type first if necessary.
4095 If TEMP and TARGET compare equal according to rtx_equal_p, but
4096 one or both of them are volatile memory refs, we have to distinguish
4097 two cases:
4098 - expand_expr has used TARGET. In this case, we must not generate
4099 another copy. This can be detected by TARGET being equal according
4100 to == .
4101 - expand_expr has not used TARGET - that means that the source just
4102 happens to have the same RTX form. Since temp will have been created
4103 by expand_expr, it will compare unequal according to == .
4104 We must generate a copy in this case, to reach the correct number
4105 of volatile memory references. */
4107 if ((! rtx_equal_p (temp, target)
4108 || (temp != target && (side_effects_p (temp)
4109 || side_effects_p (target))))
4110 && TREE_CODE (exp) != ERROR_MARK
4111 && ! dont_store_target)
4113 target = protect_from_queue (target, 1);
4114 if (GET_MODE (temp) != GET_MODE (target)
4115 && GET_MODE (temp) != VOIDmode)
4117 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4118 if (dont_return_target)
4120 /* In this case, we will return TEMP,
4121 so make sure it has the proper mode.
4122 But don't forget to store the value into TARGET. */
4123 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4124 emit_move_insn (target, temp);
4126 else
4127 convert_move (target, temp, unsignedp);
4130 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4132 /* Handle copying a string constant into an array. The string
4133 constant may be shorter than the array. So copy just the string's
4134 actual length, and clear the rest. First get the size of the data
4135 type of the string, which is actually the size of the target. */
4136 rtx size = expr_size (exp);
4138 if (GET_CODE (size) == CONST_INT
4139 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4140 emit_block_move (target, temp, size);
4141 else
4143 /* Compute the size of the data to copy from the string. */
4144 tree copy_size
4145 = size_binop (MIN_EXPR,
4146 make_tree (sizetype, size),
4147 size_int (TREE_STRING_LENGTH (exp)));
4148 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4149 VOIDmode, 0);
4150 rtx label = 0;
4152 /* Copy that much. */
4153 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4154 emit_block_move (target, temp, copy_size_rtx);
4156 /* Figure out how much is left in TARGET that we have to clear.
4157 Do all calculations in ptr_mode. */
4158 if (GET_CODE (copy_size_rtx) == CONST_INT)
4160 size = plus_constant (size, -INTVAL (copy_size_rtx));
4161 target = adjust_address (target, BLKmode,
4162 INTVAL (copy_size_rtx));
4164 else
4166 size = expand_binop (ptr_mode, sub_optab, size,
4167 copy_size_rtx, NULL_RTX, 0,
4168 OPTAB_LIB_WIDEN);
4170 #ifdef POINTERS_EXTEND_UNSIGNED
4171 if (GET_MODE (copy_size_rtx) != Pmode)
4172 copy_size_rtx = convert_memory_address (Pmode,
4173 copy_size_rtx);
4174 #endif
4176 target = offset_address (target, copy_size_rtx,
4177 highest_pow2_factor (copy_size));
4178 label = gen_label_rtx ();
4179 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4180 GET_MODE (size), 0, label);
4183 if (size != const0_rtx)
4184 clear_storage (target, size);
4186 if (label)
4187 emit_label (label);
4190 /* Handle calls that return values in multiple non-contiguous locations.
4191 The Irix 6 ABI has examples of this. */
4192 else if (GET_CODE (target) == PARALLEL)
4193 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4194 else if (GET_MODE (temp) == BLKmode)
4195 emit_block_move (target, temp, expr_size (exp));
4196 else
4197 emit_move_insn (target, temp);
4200 /* If we don't want a value, return NULL_RTX. */
4201 if (! want_value)
4202 return NULL_RTX;
4204 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4205 ??? The latter test doesn't seem to make sense. */
4206 else if (dont_return_target && GET_CODE (temp) != MEM)
4207 return temp;
4209 /* Return TARGET itself if it is a hard register. */
4210 else if (want_value && GET_MODE (target) != BLKmode
4211 && ! (GET_CODE (target) == REG
4212 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4213 return copy_to_reg (target);
4215 else
4216 return target;
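/* Illustrative sketch, not part of this file: the STRING_CST case above
   copies MIN (string length, target size) bytes and then clears whatever
   the string does not cover.  The same copy-then-clear sequence in plain
   C, with hypothetical names:  */
#if 0
#include <string.h>

/* Store the LEN-byte string SRC into the DSTSIZE-byte array DST and
   zero-fill the tail, mirroring emit_block_move followed by
   clear_storage above.  */
static void
store_string_into_array (char *dst, size_t dstsize,
                         const char *src, size_t len)
{
  size_t copy = len < dstsize ? len : dstsize;  /* the MIN_EXPR above */

  memcpy (dst, src, copy);
  if (copy < dstsize)
    memset (dst + copy, 0, dstsize - copy);
}
#endif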
4219 /* Return 1 if EXP just contains zeros. */
4221 static int
4222 is_zeros_p (exp)
4223 tree exp;
4225 tree elt;
4227 switch (TREE_CODE (exp))
4229 case CONVERT_EXPR:
4230 case NOP_EXPR:
4231 case NON_LVALUE_EXPR:
4232 case VIEW_CONVERT_EXPR:
4233 return is_zeros_p (TREE_OPERAND (exp, 0));
4235 case INTEGER_CST:
4236 return integer_zerop (exp);
4238 case COMPLEX_CST:
4239 return
4240 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4242 case REAL_CST:
4243 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4245 case CONSTRUCTOR:
4246 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4247 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4248 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4249 if (! is_zeros_p (TREE_VALUE (elt)))
4250 return 0;
4252 return 1;
4254 default:
4255 return 0;
4259 /* Return 1 if EXP contains mostly (3/4) zeros. */
4261 static int
4262 mostly_zeros_p (exp)
4263 tree exp;
4265 if (TREE_CODE (exp) == CONSTRUCTOR)
4267 int elts = 0, zeros = 0;
4268 tree elt = CONSTRUCTOR_ELTS (exp);
4269 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4271 /* If there are no ranges of true bits, it is all zero. */
4272 return elt == NULL_TREE;
4274 for (; elt; elt = TREE_CHAIN (elt))
4276 /* We do not handle the case where the index is a RANGE_EXPR,
4277 so the statistic will be somewhat inaccurate.
4278 We do make a more accurate count in store_constructor itself,
4279 and since this function is only used for nested array elements,
4280 this should be close enough. */
4281 if (mostly_zeros_p (TREE_VALUE (elt)))
4282 zeros++;
4283 elts++;
4286 return 4 * zeros >= 3 * elts;
4289 return is_zeros_p (exp);
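/* Illustrative sketch, not part of this file: the 3/4 test above is done in
   integer arithmetic as 4 * zeros >= 3 * elts, so no floating point is
   needed.  A standalone version of the same heuristic over an int array:  */
#if 0
/* Return 1 if at least three quarters of the N values in V are zero, in
   which case clearing the whole object first and storing only the nonzero
   elements is usually cheaper.  */
static int
mostly_zero_ints (const int *v, int n)
{
  int zeros = 0, i;

  for (i = 0; i < n; i++)
    if (v[i] == 0)
      zeros++;

  return 4 * zeros >= 3 * n;
}
#endif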
4292 /* Helper function for store_constructor.
4293 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4294 TYPE is the type of the CONSTRUCTOR, not the element type.
4295 CLEARED is as for store_constructor.
4296 ALIAS_SET is the alias set to use for any stores.
4298 This provides a recursive shortcut back to store_constructor when it isn't
4299 necessary to go through store_field. This is so that we can pass through
4300 the cleared field to let store_constructor know that we may not have to
4301 clear a substructure if the outer structure has already been cleared. */
4303 static void
4304 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4305 alias_set)
4306 rtx target;
4307 unsigned HOST_WIDE_INT bitsize;
4308 HOST_WIDE_INT bitpos;
4309 enum machine_mode mode;
4310 tree exp, type;
4311 int cleared;
4312 int alias_set;
4314 if (TREE_CODE (exp) == CONSTRUCTOR
4315 && bitpos % BITS_PER_UNIT == 0
4316 /* If we have a non-zero bitpos for a register target, then we just
4317 let store_field do the bitfield handling. This is unlikely to
4318 generate unnecessary clear instructions anyway. */
4319 && (bitpos == 0 || GET_CODE (target) == MEM))
4321 if (GET_CODE (target) == MEM)
4322 target
4323 = adjust_address (target,
4324 GET_MODE (target) == BLKmode
4325 || 0 != (bitpos
4326 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4327 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4330 /* Update the alias set, if required. */
4331 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4332 && MEM_ALIAS_SET (target) != 0)
4334 target = copy_rtx (target);
4335 set_mem_alias_set (target, alias_set);
4338 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4340 else
4341 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4342 alias_set);
4345 /* Store the value of constructor EXP into the rtx TARGET.
4346 TARGET is either a REG or a MEM; we know it cannot conflict, since
4347 safe_from_p has been called.
4348 CLEARED is true if TARGET is known to have been zero'd.
4349 SIZE is the number of bytes of TARGET we are allowed to modify: this
4350 may not be the same as the size of EXP if we are assigning to a field
4351 which has been packed to exclude padding bits. */
4353 static void
4354 store_constructor (exp, target, cleared, size)
4355 tree exp;
4356 rtx target;
4357 int cleared;
4358 HOST_WIDE_INT size;
4360 tree type = TREE_TYPE (exp);
4361 #ifdef WORD_REGISTER_OPERATIONS
4362 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4363 #endif
4365 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4366 || TREE_CODE (type) == QUAL_UNION_TYPE)
4368 tree elt;
4370 /* We either clear the aggregate or indicate the value is dead. */
4371 if ((TREE_CODE (type) == UNION_TYPE
4372 || TREE_CODE (type) == QUAL_UNION_TYPE)
4373 && ! cleared
4374 && ! CONSTRUCTOR_ELTS (exp))
4375 /* If the constructor is empty, clear the union. */
4377 clear_storage (target, expr_size (exp));
4378 cleared = 1;
4381 /* If we are building a static constructor into a register,
4382 set the initial value as zero so we can fold the value into
4383 a constant. But if more than one register is involved,
4384 this probably loses. */
4385 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4386 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4388 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4389 cleared = 1;
4392 /* If the constructor has fewer fields than the structure
4393 or if we are initializing the structure to mostly zeros,
4394 clear the whole structure first. Don't do this if TARGET is a
4395 register whose mode size isn't equal to SIZE since clear_storage
4396 can't handle this case. */
4397 else if (! cleared && size > 0
4398 && ((list_length (CONSTRUCTOR_ELTS (exp))
4399 != fields_length (type))
4400 || mostly_zeros_p (exp))
4401 && (GET_CODE (target) != REG
4402 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4403 == size)))
4405 clear_storage (target, GEN_INT (size));
4406 cleared = 1;
4409 if (! cleared)
4410 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4412 /* Store each element of the constructor into
4413 the corresponding field of TARGET. */
4415 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4417 tree field = TREE_PURPOSE (elt);
4418 tree value = TREE_VALUE (elt);
4419 enum machine_mode mode;
4420 HOST_WIDE_INT bitsize;
4421 HOST_WIDE_INT bitpos = 0;
4422 int unsignedp;
4423 tree offset;
4424 rtx to_rtx = target;
4426 /* Just ignore missing fields.
4427 We cleared the whole structure, above,
4428 if any fields are missing. */
4429 if (field == 0)
4430 continue;
4432 if (cleared && is_zeros_p (value))
4433 continue;
4435 if (host_integerp (DECL_SIZE (field), 1))
4436 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4437 else
4438 bitsize = -1;
4440 unsignedp = TREE_UNSIGNED (field);
4441 mode = DECL_MODE (field);
4442 if (DECL_BIT_FIELD (field))
4443 mode = VOIDmode;
4445 offset = DECL_FIELD_OFFSET (field);
4446 if (host_integerp (offset, 0)
4447 && host_integerp (bit_position (field), 0))
4449 bitpos = int_bit_position (field);
4450 offset = 0;
4452 else
4453 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4455 if (offset)
4457 rtx offset_rtx;
4459 if (contains_placeholder_p (offset))
4460 offset = build (WITH_RECORD_EXPR, sizetype,
4461 offset, make_tree (TREE_TYPE (exp), target));
4463 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4464 if (GET_CODE (to_rtx) != MEM)
4465 abort ();
4467 if (GET_MODE (offset_rtx) != ptr_mode)
4468 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4470 #ifdef POINTERS_EXTEND_UNSIGNED
4471 if (GET_MODE (offset_rtx) != Pmode)
4472 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4473 #endif
4475 to_rtx = offset_address (to_rtx, offset_rtx,
4476 highest_pow2_factor (offset));
4479 if (TREE_READONLY (field))
4481 if (GET_CODE (to_rtx) == MEM)
4482 to_rtx = copy_rtx (to_rtx);
4484 RTX_UNCHANGING_P (to_rtx) = 1;
4487 #ifdef WORD_REGISTER_OPERATIONS
4488 /* If this initializes a field that is smaller than a word, at the
4489 start of a word, try to widen it to a full word.
4490 This special case allows us to output C++ member function
4491 initializations in a form that the optimizers can understand. */
4492 if (GET_CODE (target) == REG
4493 && bitsize < BITS_PER_WORD
4494 && bitpos % BITS_PER_WORD == 0
4495 && GET_MODE_CLASS (mode) == MODE_INT
4496 && TREE_CODE (value) == INTEGER_CST
4497 && exp_size >= 0
4498 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4500 tree type = TREE_TYPE (value);
4502 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4504 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4505 value = convert (type, value);
4508 if (BYTES_BIG_ENDIAN)
4509 value
4510 = fold (build (LSHIFT_EXPR, type, value,
4511 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4512 bitsize = BITS_PER_WORD;
4513 mode = word_mode;
4515 #endif
4517 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4518 && DECL_NONADDRESSABLE_P (field))
4520 to_rtx = copy_rtx (to_rtx);
4521 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4524 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4525 value, type, cleared,
4526 get_alias_set (TREE_TYPE (field)));
4529 else if (TREE_CODE (type) == ARRAY_TYPE)
4531 tree elt;
4532 int i;
4533 int need_to_clear;
4534 tree domain = TYPE_DOMAIN (type);
4535 tree elttype = TREE_TYPE (type);
4536 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4537 && TYPE_MAX_VALUE (domain)
4538 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4539 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4540 HOST_WIDE_INT minelt = 0;
4541 HOST_WIDE_INT maxelt = 0;
4543 /* If we have constant bounds for the range of the type, get them. */
4544 if (const_bounds_p)
4546 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4547 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4550 /* If the constructor has fewer elements than the array,
4551 clear the whole array first. Similarly if this is
4552 a static constructor of a non-BLKmode object. */
4553 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4554 need_to_clear = 1;
4555 else
4557 HOST_WIDE_INT count = 0, zero_count = 0;
4558 need_to_clear = ! const_bounds_p;
4560 /* This loop is a more accurate version of the loop in
4561 mostly_zeros_p (it handles RANGE_EXPR in an index).
4562 It is also needed to check for missing elements. */
4563 for (elt = CONSTRUCTOR_ELTS (exp);
4564 elt != NULL_TREE && ! need_to_clear;
4565 elt = TREE_CHAIN (elt))
4567 tree index = TREE_PURPOSE (elt);
4568 HOST_WIDE_INT this_node_count;
4570 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4572 tree lo_index = TREE_OPERAND (index, 0);
4573 tree hi_index = TREE_OPERAND (index, 1);
4575 if (! host_integerp (lo_index, 1)
4576 || ! host_integerp (hi_index, 1))
4578 need_to_clear = 1;
4579 break;
4582 this_node_count = (tree_low_cst (hi_index, 1)
4583 - tree_low_cst (lo_index, 1) + 1);
4585 else
4586 this_node_count = 1;
4588 count += this_node_count;
4589 if (mostly_zeros_p (TREE_VALUE (elt)))
4590 zero_count += this_node_count;
4593 /* Clear the entire array first if there are any missing elements,
4594 or if the incidence of zero elements is >= 75%. */
4595 if (! need_to_clear
4596 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4597 need_to_clear = 1;
4600 if (need_to_clear && size > 0)
4602 if (! cleared)
4603 clear_storage (target, GEN_INT (size));
4604 cleared = 1;
4606 else if (REG_P (target))
4607 /* Inform later passes that the old value is dead. */
4608 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4610 /* Store each element of the constructor into
4611 the corresponding element of TARGET, determined
4612 by counting the elements. */
4613 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4614 elt;
4615 elt = TREE_CHAIN (elt), i++)
4617 enum machine_mode mode;
4618 HOST_WIDE_INT bitsize;
4619 HOST_WIDE_INT bitpos;
4620 int unsignedp;
4621 tree value = TREE_VALUE (elt);
4622 tree index = TREE_PURPOSE (elt);
4623 rtx xtarget = target;
4625 if (cleared && is_zeros_p (value))
4626 continue;
4628 unsignedp = TREE_UNSIGNED (elttype);
4629 mode = TYPE_MODE (elttype);
4630 if (mode == BLKmode)
4631 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4632 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4633 : -1);
4634 else
4635 bitsize = GET_MODE_BITSIZE (mode);
4637 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4639 tree lo_index = TREE_OPERAND (index, 0);
4640 tree hi_index = TREE_OPERAND (index, 1);
4641 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4642 struct nesting *loop;
4643 HOST_WIDE_INT lo, hi, count;
4644 tree position;
4646 /* If the range is constant and "small", unroll the loop. */
4647 if (const_bounds_p
4648 && host_integerp (lo_index, 0)
4649 && host_integerp (hi_index, 0)
4650 && (lo = tree_low_cst (lo_index, 0),
4651 hi = tree_low_cst (hi_index, 0),
4652 count = hi - lo + 1,
4653 (GET_CODE (target) != MEM
4654 || count <= 2
4655 || (host_integerp (TYPE_SIZE (elttype), 1)
4656 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4657 <= 40 * 8)))))
4659 lo -= minelt; hi -= minelt;
4660 for (; lo <= hi; lo++)
4662 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4664 if (GET_CODE (target) == MEM
4665 && !MEM_KEEP_ALIAS_SET_P (target)
4666 && TYPE_NONALIASED_COMPONENT (type))
4668 target = copy_rtx (target);
4669 MEM_KEEP_ALIAS_SET_P (target) = 1;
4672 store_constructor_field
4673 (target, bitsize, bitpos, mode, value, type, cleared,
4674 get_alias_set (elttype));
4677 else
4679 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4680 loop_top = gen_label_rtx ();
4681 loop_end = gen_label_rtx ();
4683 unsignedp = TREE_UNSIGNED (domain);
4685 index = build_decl (VAR_DECL, NULL_TREE, domain);
4687 index_r
4688 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4689 &unsignedp, 0));
4690 SET_DECL_RTL (index, index_r);
4691 if (TREE_CODE (value) == SAVE_EXPR
4692 && SAVE_EXPR_RTL (value) == 0)
4694 /* Make sure value gets expanded once before the
4695 loop. */
4696 expand_expr (value, const0_rtx, VOIDmode, 0);
4697 emit_queue ();
4699 store_expr (lo_index, index_r, 0);
4700 loop = expand_start_loop (0);
4702 /* Assign value to element index. */
4703 position
4704 = convert (ssizetype,
4705 fold (build (MINUS_EXPR, TREE_TYPE (index),
4706 index, TYPE_MIN_VALUE (domain))));
4707 position = size_binop (MULT_EXPR, position,
4708 convert (ssizetype,
4709 TYPE_SIZE_UNIT (elttype)));
4711 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4712 xtarget = offset_address (target, pos_rtx,
4713 highest_pow2_factor (position));
4714 xtarget = adjust_address (xtarget, mode, 0);
4715 if (TREE_CODE (value) == CONSTRUCTOR)
4716 store_constructor (value, xtarget, cleared,
4717 bitsize / BITS_PER_UNIT);
4718 else
4719 store_expr (value, xtarget, 0);
4721 expand_exit_loop_if_false (loop,
4722 build (LT_EXPR, integer_type_node,
4723 index, hi_index));
4725 expand_increment (build (PREINCREMENT_EXPR,
4726 TREE_TYPE (index),
4727 index, integer_one_node), 0, 0);
4728 expand_end_loop ();
4729 emit_label (loop_end);
4732 else if ((index != 0 && ! host_integerp (index, 0))
4733 || ! host_integerp (TYPE_SIZE (elttype), 1))
4735 tree position;
4737 if (index == 0)
4738 index = ssize_int (1);
4740 if (minelt)
4741 index = convert (ssizetype,
4742 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4743 TYPE_MIN_VALUE (domain))));
4745 position = size_binop (MULT_EXPR, index,
4746 convert (ssizetype,
4747 TYPE_SIZE_UNIT (elttype)));
4748 xtarget = offset_address (target,
4749 expand_expr (position, 0, VOIDmode, 0),
4750 highest_pow2_factor (position));
4751 xtarget = adjust_address (xtarget, mode, 0);
4752 store_expr (value, xtarget, 0);
4754 else
4756 if (index != 0)
4757 bitpos = ((tree_low_cst (index, 0) - minelt)
4758 * tree_low_cst (TYPE_SIZE (elttype), 1));
4759 else
4760 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4762 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4763 && TYPE_NONALIASED_COMPONENT (type))
4765 target = copy_rtx (target);
4766 MEM_KEEP_ALIAS_SET_P (target) = 1;
4769 store_constructor_field (target, bitsize, bitpos, mode, value,
4770 type, cleared, get_alias_set (elttype));
4776 /* Set constructor assignments. */
4777 else if (TREE_CODE (type) == SET_TYPE)
4779 tree elt = CONSTRUCTOR_ELTS (exp);
4780 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4781 tree domain = TYPE_DOMAIN (type);
4782 tree domain_min, domain_max, bitlength;
4784 /* The default implementation strategy is to extract the constant
4785 parts of the constructor, use that to initialize the target,
4786 and then "or" in whatever non-constant ranges we need in addition.
4788 If a large set is all zero or all ones, it is
4789 probably better to set it using memset (if available) or bzero.
4790 Also, if a large set has just a single range, it may be
4791 better to first clear the whole set (using bzero/memset)
4792 and then set the bits we want. */
4794 /* Check for all zeros. */
4795 if (elt == NULL_TREE && size > 0)
4797 if (!cleared)
4798 clear_storage (target, GEN_INT (size));
4799 return;
4802 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4803 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4804 bitlength = size_binop (PLUS_EXPR,
4805 size_diffop (domain_max, domain_min),
4806 ssize_int (1));
4808 nbits = tree_low_cst (bitlength, 1);
4810 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4811 are "complicated" (more than one range), initialize (the
4812 constant parts) by copying from a constant. */
4813 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4814 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4816 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4817 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4818 char *bit_buffer = (char *) alloca (nbits);
4819 HOST_WIDE_INT word = 0;
4820 unsigned int bit_pos = 0;
4821 unsigned int ibit = 0;
4822 unsigned int offset = 0; /* In bytes from beginning of set. */
4824 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4825 for (;;)
4827 if (bit_buffer[ibit])
4829 if (BYTES_BIG_ENDIAN)
4830 word |= (1 << (set_word_size - 1 - bit_pos));
4831 else
4832 word |= 1 << bit_pos;
4835 bit_pos++; ibit++;
4836 if (bit_pos >= set_word_size || ibit == nbits)
4838 if (word != 0 || ! cleared)
4840 rtx datum = GEN_INT (word);
4841 rtx to_rtx;
4843 /* The assumption here is that it is safe to use
4844 XEXP if the set is multi-word, but not if
4845 it's single-word. */
4846 if (GET_CODE (target) == MEM)
4847 to_rtx = adjust_address (target, mode, offset);
4848 else if (offset == 0)
4849 to_rtx = target;
4850 else
4851 abort ();
4852 emit_move_insn (to_rtx, datum);
4855 if (ibit == nbits)
4856 break;
4857 word = 0;
4858 bit_pos = 0;
4859 offset += set_word_size / BITS_PER_UNIT;
4863 else if (!cleared)
4864 /* Don't bother clearing storage if the set is all ones. */
4865 if (TREE_CHAIN (elt) != NULL_TREE
4866 || (TREE_PURPOSE (elt) == NULL_TREE
4867 ? nbits != 1
4868 : ( ! host_integerp (TREE_VALUE (elt), 0)
4869 || ! host_integerp (TREE_PURPOSE (elt), 0)
4870 || (tree_low_cst (TREE_VALUE (elt), 0)
4871 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4872 != (HOST_WIDE_INT) nbits))))
4873 clear_storage (target, expr_size (exp));
4875 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4877 /* Start of range of element or NULL. */
4878 tree startbit = TREE_PURPOSE (elt);
4879 /* End of range of element, or element value. */
4880 tree endbit = TREE_VALUE (elt);
4881 #ifdef TARGET_MEM_FUNCTIONS
4882 HOST_WIDE_INT startb, endb;
4883 #endif
4884 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4886 bitlength_rtx = expand_expr (bitlength,
4887 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4889 /* Handle non-range tuple element like [ expr ]. */
4890 if (startbit == NULL_TREE)
4892 startbit = save_expr (endbit);
4893 endbit = startbit;
4896 startbit = convert (sizetype, startbit);
4897 endbit = convert (sizetype, endbit);
4898 if (! integer_zerop (domain_min))
4900 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4901 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4903 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4904 EXPAND_CONST_ADDRESS);
4905 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4906 EXPAND_CONST_ADDRESS);
4908 if (REG_P (target))
4910 targetx
4911 = assign_temp
4912 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4913 TYPE_QUAL_CONST)),
4914 0, 1, 1);
4915 emit_move_insn (targetx, target);
4918 else if (GET_CODE (target) == MEM)
4919 targetx = target;
4920 else
4921 abort ();
4923 #ifdef TARGET_MEM_FUNCTIONS
4924 /* Optimization: If startbit and endbit are
4925 constants divisible by BITS_PER_UNIT,
4926 call memset instead. */
4927 if (TREE_CODE (startbit) == INTEGER_CST
4928 && TREE_CODE (endbit) == INTEGER_CST
4929 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4930 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4932 emit_library_call (memset_libfunc, LCT_NORMAL,
4933 VOIDmode, 3,
4934 plus_constant (XEXP (targetx, 0),
4935 startb / BITS_PER_UNIT),
4936 Pmode,
4937 constm1_rtx, TYPE_MODE (integer_type_node),
4938 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4939 TYPE_MODE (sizetype));
4941 else
4942 #endif
4943 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4944 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4945 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4946 startbit_rtx, TYPE_MODE (sizetype),
4947 endbit_rtx, TYPE_MODE (sizetype));
4949 if (REG_P (target))
4950 emit_move_insn (target, targetx);
4954 else
4955 abort ();
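/* Illustrative sketch, not part of this file: the SET_TYPE case above packs
   a buffer of 0/1 flags into target words, putting the first flag in the
   most significant bit on big-endian targets.  A self-contained version of
   that packing loop, assuming 32-bit words and a hypothetical big_endian
   flag:  */
#if 0
/* Pack NBITS 0/1 flags from BITS into 32-bit WORDS.  Assumes NBITS > 0
   and that WORDS has room for (NBITS + 31) / 32 entries.  */
static void
pack_bits (const unsigned char *bits, unsigned int nbits,
           unsigned int *words, int big_endian)
{
  unsigned int word = 0, bit_pos = 0, ibit = 0, iword = 0;

  for (;;)
    {
      if (bits[ibit])
        word |= big_endian ? 1u << (31 - bit_pos) : 1u << bit_pos;

      bit_pos++, ibit++;
      if (bit_pos >= 32 || ibit == nbits)
        {
          words[iword++] = word;   /* flush one target word */
          if (ibit == nbits)
            break;
          word = 0;
          bit_pos = 0;
        }
    }
}
#endif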
4958 /* Store the value of EXP (an expression tree)
4959 into a subfield of TARGET which has mode MODE and occupies
4960 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4961 If MODE is VOIDmode, it means that we are storing into a bit-field.
4963 If VALUE_MODE is VOIDmode, return nothing in particular.
4964 UNSIGNEDP is not used in this case.
4966 Otherwise, return an rtx for the value stored. This rtx
4967 has mode VALUE_MODE if that is convenient to do.
4968 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4970 TYPE is the type of the underlying object.
4972 ALIAS_SET is the alias set for the destination. This value will
4973 (in general) be different from that for TARGET, since TARGET is a
4974 reference to the containing structure. */
4976 static rtx
4977 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
4978 alias_set)
4979 rtx target;
4980 HOST_WIDE_INT bitsize;
4981 HOST_WIDE_INT bitpos;
4982 enum machine_mode mode;
4983 tree exp;
4984 enum machine_mode value_mode;
4985 int unsignedp;
4986 tree type;
4987 int alias_set;
4989 HOST_WIDE_INT width_mask = 0;
4991 if (TREE_CODE (exp) == ERROR_MARK)
4992 return const0_rtx;
4994 /* If we have nothing to store, do nothing unless the expression has
4995 side-effects. */
4996 if (bitsize == 0)
4997 return expand_expr (exp, const0_rtx, VOIDmode, 0);
4998 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
4999 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5001 /* If we are storing into an unaligned field of an aligned union that is
5002 in a register, we may have the mode of TARGET being an integer mode but
5003 MODE == BLKmode. In that case, get an aligned object whose size and
5004 alignment are the same as TARGET and store TARGET into it (we can avoid
5005 the store if the field being stored is the entire width of TARGET). Then
5006 call ourselves recursively to store the field into a BLKmode version of
5007 that object. Finally, load from the object into TARGET. This is not
5008 very efficient in general, but should only be slightly more expensive
5009 than the otherwise-required unaligned accesses. Perhaps this can be
5010 cleaned up later. */
5012 if (mode == BLKmode
5013 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5015 rtx object
5016 = assign_temp
5017 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5018 0, 1, 1);
5019 rtx blk_object = adjust_address (object, BLKmode, 0);
5021 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5022 emit_move_insn (object, target);
5024 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5025 alias_set);
5027 emit_move_insn (target, object);
5029 /* We want to return the BLKmode version of the data. */
5030 return blk_object;
5033 if (GET_CODE (target) == CONCAT)
5035 /* We're storing into a struct containing a single __complex. */
5037 if (bitpos != 0)
5038 abort ();
5039 return store_expr (exp, target, 0);
5042 /* If the structure is in a register or if the component
5043 is a bit field, we cannot use addressing to access it.
5044 Use bit-field techniques or SUBREG to store in it. */
5046 if (mode == VOIDmode
5047 || (mode != BLKmode && ! direct_store[(int) mode]
5048 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5049 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5050 || GET_CODE (target) == REG
5051 || GET_CODE (target) == SUBREG
5052 /* If the field isn't aligned enough to store as an ordinary memref,
5053 store it as a bit field. */
5054 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5055 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5056 || bitpos % GET_MODE_ALIGNMENT (mode)))
5057 /* If the RHS and field are a constant size and the size of the
5058 RHS isn't the same size as the bitfield, we must use bitfield
5059 operations. */
5060 || (bitsize >= 0
5061 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5062 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5064 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5066 /* If BITSIZE is narrower than the size of the type of EXP
5067 we will be narrowing TEMP. Normally, what's wanted are the
5068 low-order bits. However, if EXP's type is a record and this is
5069 a big-endian machine, we want the upper BITSIZE bits. */
5070 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5071 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5072 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5073 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5074 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5075 - bitsize),
5076 temp, 1);
5078 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5079 MODE. */
5080 if (mode != VOIDmode && mode != BLKmode
5081 && mode != TYPE_MODE (TREE_TYPE (exp)))
5082 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5084 /* If the modes of TARGET and TEMP are both BLKmode, both
5085 must be in memory and BITPOS must be aligned on a byte
5086 boundary. If so, we simply do a block copy. */
5087 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5089 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5090 || bitpos % BITS_PER_UNIT != 0)
5091 abort ();
5093 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5094 emit_block_move (target, temp,
5095 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5096 / BITS_PER_UNIT));
5098 return value_mode == VOIDmode ? const0_rtx : target;
5101 /* Store the value in the bitfield. */
5102 store_bit_field (target, bitsize, bitpos, mode, temp,
5103 int_size_in_bytes (type));
5105 if (value_mode != VOIDmode)
5107 /* The caller wants an rtx for the value.
5108 If possible, avoid refetching from the bitfield itself. */
5109 if (width_mask != 0
5110 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5112 tree count;
5113 enum machine_mode tmode;
5115 if (unsignedp)
5116 return expand_and (temp,
5117 GEN_INT
5118 (trunc_int_for_mode
5119 (width_mask,
5120 GET_MODE (temp) == VOIDmode
5121 ? value_mode
5122 : GET_MODE (temp))), NULL_RTX);
5124 tmode = GET_MODE (temp);
5125 if (tmode == VOIDmode)
5126 tmode = value_mode;
5127 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5128 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5129 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5132 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5133 NULL_RTX, value_mode, VOIDmode,
5134 int_size_in_bytes (type));
5136 return const0_rtx;
5138 else
5140 rtx addr = XEXP (target, 0);
5141 rtx to_rtx = target;
5143 /* If a value is wanted, it must be the lhs;
5144 so make the address stable for multiple use. */
5146 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5147 && ! CONSTANT_ADDRESS_P (addr)
5148 /* A frame-pointer reference is already stable. */
5149 && ! (GET_CODE (addr) == PLUS
5150 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5151 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5152 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5153 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5155 /* Now build a reference to just the desired component. */
5157 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5159 if (to_rtx == target)
5160 to_rtx = copy_rtx (to_rtx);
5162 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5163 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5164 set_mem_alias_set (to_rtx, alias_set);
5166 return store_expr (exp, to_rtx, value_mode != VOIDmode);
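/* Illustrative sketch, not part of this file: WIDTH_MASK above is
   (1 << bitsize) - 1, and storing into a bit-field amounts to masking the
   value to that width and merging it in at the right position.  A
   word-sized standalone version of the idea behind store_bit_field:  */
#if 0
/* Store the low BITSIZE bits of VALUE into *WORD at bit position BITPOS
   (bit 0 = least significant).  Assumes 0 < bitsize < 32 and
   bitpos + bitsize <= 32.  */
static void
store_bits_in_word (unsigned int *word, unsigned int value,
                    unsigned int bitpos, unsigned int bitsize)
{
  unsigned int width_mask = (1u << bitsize) - 1;

  *word &= ~(width_mask << bitpos);          /* clear the old field */
  *word |= (value & width_mask) << bitpos;   /* insert the new value */
}
#endif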
5170 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5171 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5172 codes and find the ultimate containing object, which we return.
5174 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5175 bit position, and *PUNSIGNEDP to the signedness of the field.
5176 If the position of the field is variable, we store a tree
5177 giving the variable offset (in units) in *POFFSET.
5178 This offset is in addition to the bit position.
5179 If the position is not variable, we store 0 in *POFFSET.
5181 If any of the extraction expressions is volatile,
5182 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5184 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5185 is a mode that can be used to access the field. In that case, *PBITSIZE
5186 is redundant.
5188 If the field describes a variable-sized object, *PMODE is set to
5189 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5190 this case, but the address of the object can be found. */
5192 tree
5193 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5194 punsignedp, pvolatilep)
5195 tree exp;
5196 HOST_WIDE_INT *pbitsize;
5197 HOST_WIDE_INT *pbitpos;
5198 tree *poffset;
5199 enum machine_mode *pmode;
5200 int *punsignedp;
5201 int *pvolatilep;
5203 tree size_tree = 0;
5204 enum machine_mode mode = VOIDmode;
5205 tree offset = size_zero_node;
5206 tree bit_offset = bitsize_zero_node;
5207 tree placeholder_ptr = 0;
5208 tree tem;
5210 /* First get the mode, signedness, and size. We do this from just the
5211 outermost expression. */
5212 if (TREE_CODE (exp) == COMPONENT_REF)
5214 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5215 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5216 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5218 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5220 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5222 size_tree = TREE_OPERAND (exp, 1);
5223 *punsignedp = TREE_UNSIGNED (exp);
5225 else
5227 mode = TYPE_MODE (TREE_TYPE (exp));
5228 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5230 if (mode == BLKmode)
5231 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5232 else
5233 *pbitsize = GET_MODE_BITSIZE (mode);
5236 if (size_tree != 0)
5238 if (! host_integerp (size_tree, 1))
5239 mode = BLKmode, *pbitsize = -1;
5240 else
5241 *pbitsize = tree_low_cst (size_tree, 1);
5244 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5245 and find the ultimate containing object. */
5246 while (1)
5248 if (TREE_CODE (exp) == BIT_FIELD_REF)
5249 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5250 else if (TREE_CODE (exp) == COMPONENT_REF)
5252 tree field = TREE_OPERAND (exp, 1);
5253 tree this_offset = DECL_FIELD_OFFSET (field);
5255 /* If this field hasn't been filled in yet, don't go
5256 past it. This should only happen when folding expressions
5257 made during type construction. */
5258 if (this_offset == 0)
5259 break;
5260 else if (! TREE_CONSTANT (this_offset)
5261 && contains_placeholder_p (this_offset))
5262 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5264 offset = size_binop (PLUS_EXPR, offset, this_offset);
5265 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5266 DECL_FIELD_BIT_OFFSET (field));
5268 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5271 else if (TREE_CODE (exp) == ARRAY_REF
5272 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5274 tree index = TREE_OPERAND (exp, 1);
5275 tree array = TREE_OPERAND (exp, 0);
5276 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5277 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5278 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5280 /* We assume all arrays have sizes that are a multiple of a byte.
5281 First subtract the lower bound, if any, in the type of the
5282 index, then convert to sizetype and multiply by the size of the
5283 array element. */
5284 if (low_bound != 0 && ! integer_zerop (low_bound))
5285 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5286 index, low_bound));
5288 /* If the index has a self-referential type, pass it to a
5289 WITH_RECORD_EXPR; if the component size does, pass our
5290 component to one. */
5291 if (! TREE_CONSTANT (index)
5292 && contains_placeholder_p (index))
5293 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5294 if (! TREE_CONSTANT (unit_size)
5295 && contains_placeholder_p (unit_size))
5296 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5298 offset = size_binop (PLUS_EXPR, offset,
5299 size_binop (MULT_EXPR,
5300 convert (sizetype, index),
5301 unit_size));
5304 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5306 tree new = find_placeholder (exp, &placeholder_ptr);
5308 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5309 We might have been called from tree optimization where we
5310 haven't set up an object yet. */
5311 if (new == 0)
5312 break;
5313 else
5314 exp = new;
5316 continue;
5318 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5319 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5320 && ! ((TREE_CODE (exp) == NOP_EXPR
5321 || TREE_CODE (exp) == CONVERT_EXPR)
5322 && (TYPE_MODE (TREE_TYPE (exp))
5323 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5324 break;
5326 /* If any reference in the chain is volatile, the effect is volatile. */
5327 if (TREE_THIS_VOLATILE (exp))
5328 *pvolatilep = 1;
5330 exp = TREE_OPERAND (exp, 0);
5333 /* If OFFSET is constant, see if we can return the whole thing as a
5334 constant bit position. Otherwise, split it up. */
5335 if (host_integerp (offset, 0)
5336 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5337 bitsize_unit_node))
5338 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5339 && host_integerp (tem, 0))
5340 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5341 else
5342 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5344 *pmode = mode;
5345 return exp;
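/* Illustrative sketch, not part of this file: when the offset is constant,
   the folding above reduces the whole reference chain to a single bit
   position, bitpos = offset * BITS_PER_UNIT + bit_offset.  In plain C
   terms, for a constant field reference the same arithmetic looks like
   this (hypothetical structs, 8-bit bytes assumed):  */
#if 0
#include <stddef.h>

struct inner { unsigned int flags : 3; unsigned int rest : 29; };
struct outer { char pad[6]; struct inner in; };

/* Bit position of outer.in.flags counted from the start of struct outer:
   the byte offset of the containing object times 8, plus the bit offset
   of the field inside it (0 here).  */
enum { FLAGS_BITPOS = offsetof (struct outer, in) * 8 + 0 };
#endif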
5348 /* Return 1 if T is an expression that get_inner_reference handles. */
5350 int
5351 handled_component_p (t)
5352 tree t;
5354 switch (TREE_CODE (t))
5356 case BIT_FIELD_REF:
5357 case COMPONENT_REF:
5358 case ARRAY_REF:
5359 case ARRAY_RANGE_REF:
5360 case NON_LVALUE_EXPR:
5361 case VIEW_CONVERT_EXPR:
5362 return 1;
5364 case NOP_EXPR:
5365 case CONVERT_EXPR:
5366 return (TYPE_MODE (TREE_TYPE (t))
5367 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5369 default:
5370 return 0;
5374 /* Given an rtx VALUE that may contain additions and multiplications, return
5375 an equivalent value that just refers to a register, memory, or constant.
5376 This is done by generating instructions to perform the arithmetic and
5377 returning a pseudo-register containing the value.
5379 The returned value may be a REG, SUBREG, MEM or constant. */
5381 rtx
5382 force_operand (value, target)
5383 rtx value, target;
5385 optab binoptab = 0;
5386 /* Use a temporary to force order of execution of calls to
5387 `force_operand'. */
5388 rtx tmp;
5389 rtx op2;
5390 /* Use subtarget as the target for operand 0 of a binary operation. */
5391 rtx subtarget = get_subtarget (target);
5393 /* Check for a PIC address load. */
5394 if (flag_pic
5395 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5396 && XEXP (value, 0) == pic_offset_table_rtx
5397 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5398 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5399 || GET_CODE (XEXP (value, 1)) == CONST))
5401 if (!subtarget)
5402 subtarget = gen_reg_rtx (GET_MODE (value));
5403 emit_move_insn (subtarget, value);
5404 return subtarget;
5407 if (GET_CODE (value) == PLUS)
5408 binoptab = add_optab;
5409 else if (GET_CODE (value) == MINUS)
5410 binoptab = sub_optab;
5411 else if (GET_CODE (value) == MULT)
5413 op2 = XEXP (value, 1);
5414 if (!CONSTANT_P (op2)
5415 && !(GET_CODE (op2) == REG && op2 != subtarget))
5416 subtarget = 0;
5417 tmp = force_operand (XEXP (value, 0), subtarget);
5418 return expand_mult (GET_MODE (value), tmp,
5419 force_operand (op2, NULL_RTX),
5420 target, 1);
5423 if (binoptab)
5425 op2 = XEXP (value, 1);
5426 if (!CONSTANT_P (op2)
5427 && !(GET_CODE (op2) == REG && op2 != subtarget))
5428 subtarget = 0;
5429 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5431 binoptab = add_optab;
5432 op2 = negate_rtx (GET_MODE (value), op2);
5435 /* Check for an addition with OP2 a constant integer and our first
5436 operand a PLUS of a virtual register and something else. In that
5437 case, we want to emit the sum of the virtual register and the
5438 constant first and then add the other value. This allows virtual
5439 register instantiation to simply modify the constant rather than
5440 creating another one around this addition. */
5441 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5442 && GET_CODE (XEXP (value, 0)) == PLUS
5443 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5444 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5445 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5447 rtx temp = expand_binop (GET_MODE (value), binoptab,
5448 XEXP (XEXP (value, 0), 0), op2,
5449 subtarget, 0, OPTAB_LIB_WIDEN);
5450 return expand_binop (GET_MODE (value), binoptab, temp,
5451 force_operand (XEXP (XEXP (value, 0), 1), 0),
5452 target, 0, OPTAB_LIB_WIDEN);
5455 tmp = force_operand (XEXP (value, 0), subtarget);
5456 return expand_binop (GET_MODE (value), binoptab, tmp,
5457 force_operand (op2, NULL_RTX),
5458 target, 0, OPTAB_LIB_WIDEN);
5459 /* We give UNSIGNEDP = 0 to expand_binop
5460 because the only operations we are expanding here are signed ones. */
5463 #ifdef INSN_SCHEDULING
5464 /* On machines that have insn scheduling, we want all memory references to be
5465 explicit, so we need to deal with such paradoxical SUBREGs. */
5466 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5467 && (GET_MODE_SIZE (GET_MODE (value))
5468 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5469 value
5470 = simplify_gen_subreg (GET_MODE (value),
5471 force_reg (GET_MODE (SUBREG_REG (value)),
5472 force_operand (SUBREG_REG (value),
5473 NULL_RTX)),
5474 GET_MODE (SUBREG_REG (value)),
5475 SUBREG_BYTE (value));
5476 #endif
5478 return value;
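/* Illustrative sketch, not part of this file: the special case above
   reassociates (virtual_reg + x) + c into (virtual_reg + c) + x so that
   virtual register instantiation only has to adjust the constant.  The
   same reassociation on plain integers; both orders compute reg + x + c:  */
#if 0
static long
add_constant_first (long reg, long x, long c)
{
  long base = reg + c;   /* the register-plus-constant part, folded later */
  return base + x;       /* then the remaining, possibly complex, term */
}
#endif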
5481 /* Subroutine of expand_expr: return nonzero iff there is no way that
5482 EXP can reference X, which is being modified. TOP_P is nonzero if this
5483 call is going to be used to determine whether we need a temporary
5484 for EXP, as opposed to a recursive call to this function.
5486 It is always safe for this routine to return zero since it merely
5487 searches for optimization opportunities. */
5489 int
5490 safe_from_p (x, exp, top_p)
5491 rtx x;
5492 tree exp;
5493 int top_p;
5495 rtx exp_rtl = 0;
5496 int i, nops;
5497 static tree save_expr_list;
5499 if (x == 0
5500 /* If EXP has varying size, we MUST use a target since we currently
5501 have no way of allocating temporaries of variable size
5502 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5503 So we assume here that something at a higher level has prevented a
5504 clash. This is somewhat bogus, but the best we can do. Only
5505 do this when X is BLKmode and when we are at the top level. */
5506 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5507 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5508 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5509 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5510 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5511 != INTEGER_CST)
5512 && GET_MODE (x) == BLKmode)
5513 /* If X is in the outgoing argument area, it is always safe. */
5514 || (GET_CODE (x) == MEM
5515 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5516 || (GET_CODE (XEXP (x, 0)) == PLUS
5517 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5518 return 1;
5520 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5521 find the underlying pseudo. */
5522 if (GET_CODE (x) == SUBREG)
5524 x = SUBREG_REG (x);
5525 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5526 return 0;
5529 /* A SAVE_EXPR might appear many times in the expression passed to the
5530 top-level safe_from_p call, and if it has a complex subexpression,
5531 examining it multiple times could result in a combinatorial explosion.
5532 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5533 with optimization took about 28 minutes to compile -- even though it was
5534 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5535 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5536 we have processed. Note that the only test of top_p was above. */
5538 if (top_p)
5540 int rtn;
5541 tree t;
5543 save_expr_list = 0;
5545 rtn = safe_from_p (x, exp, 0);
5547 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5548 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5550 return rtn;
5553 /* Now look at our tree code and possibly recurse. */
5554 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5556 case 'd':
5557 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5558 break;
5560 case 'c':
5561 return 1;
5563 case 'x':
5564 if (TREE_CODE (exp) == TREE_LIST)
5565 return ((TREE_VALUE (exp) == 0
5566 || safe_from_p (x, TREE_VALUE (exp), 0))
5567 && (TREE_CHAIN (exp) == 0
5568 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5569 else if (TREE_CODE (exp) == ERROR_MARK)
5570 return 1; /* An already-visited SAVE_EXPR? */
5571 else
5572 return 0;
5574 case '1':
5575 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5577 case '2':
5578 case '<':
5579 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5580 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5582 case 'e':
5583 case 'r':
5584 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5585 the expression. If it is set, we conflict iff we are that rtx or
5586 both are in memory. Otherwise, we check all operands of the
5587 expression recursively. */
5589 switch (TREE_CODE (exp))
5591 case ADDR_EXPR:
5592 /* If the operand is static or we are static, we can't conflict.
5593 Likewise if we don't conflict with the operand at all. */
5594 if (staticp (TREE_OPERAND (exp, 0))
5595 || TREE_STATIC (exp)
5596 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5597 return 1;
5599 /* Otherwise, the only way this can conflict is if we are taking
5600 the address of a DECL whose address is part of X, which is
5601 very rare. */
5602 exp = TREE_OPERAND (exp, 0);
5603 if (DECL_P (exp))
5605 if (!DECL_RTL_SET_P (exp)
5606 || GET_CODE (DECL_RTL (exp)) != MEM)
5607 return 0;
5608 else
5609 exp_rtl = XEXP (DECL_RTL (exp), 0);
5611 break;
5613 case INDIRECT_REF:
5614 if (GET_CODE (x) == MEM
5615 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5616 get_alias_set (exp)))
5617 return 0;
5618 break;
5620 case CALL_EXPR:
5621 /* Assume that the call will clobber all hard registers and
5622 all of memory. */
5623 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5624 || GET_CODE (x) == MEM)
5625 return 0;
5626 break;
5628 case RTL_EXPR:
5629 /* If a sequence exists, we would have to scan every instruction
5630 in the sequence to see if it was safe. This is probably not
5631 worthwhile. */
5632 if (RTL_EXPR_SEQUENCE (exp))
5633 return 0;
5635 exp_rtl = RTL_EXPR_RTL (exp);
5636 break;
5638 case WITH_CLEANUP_EXPR:
5639 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5640 break;
5642 case CLEANUP_POINT_EXPR:
5643 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5645 case SAVE_EXPR:
5646 exp_rtl = SAVE_EXPR_RTL (exp);
5647 if (exp_rtl)
5648 break;
5650 /* If we've already scanned this, don't do it again. Otherwise,
5651 show we've scanned it and record for clearing the flag if we're
5652 going on. */
5653 if (TREE_PRIVATE (exp))
5654 return 1;
5656 TREE_PRIVATE (exp) = 1;
5657 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5659 TREE_PRIVATE (exp) = 0;
5660 return 0;
5663 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5664 return 1;
5666 case BIND_EXPR:
5667 /* The only operand we look at is operand 1. The rest aren't
5668 part of the expression. */
5669 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5671 case METHOD_CALL_EXPR:
5672 /* This takes an rtx argument, but shouldn't appear here. */
5673 abort ();
5675 default:
5676 break;
5679 /* If we have an rtx, we do not need to scan our operands. */
5680 if (exp_rtl)
5681 break;
5683 nops = first_rtl_op (TREE_CODE (exp));
5684 for (i = 0; i < nops; i++)
5685 if (TREE_OPERAND (exp, i) != 0
5686 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5687 return 0;
5689 /* If this is a language-specific tree code, it may require
5690 special handling. */
5691 if ((unsigned int) TREE_CODE (exp)
5692 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5693 && !(*lang_hooks.safe_from_p) (x, exp))
5694 return 0;
5697 /* If we have an rtl, find any enclosed object. Then see if we conflict
5698 with it. */
5699 if (exp_rtl)
5701 if (GET_CODE (exp_rtl) == SUBREG)
5703 exp_rtl = SUBREG_REG (exp_rtl);
5704 if (GET_CODE (exp_rtl) == REG
5705 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5706 return 0;
5709 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5710 are memory and they conflict. */
5711 return ! (rtx_equal_p (x, exp_rtl)
5712 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5713 && true_dependence (exp_rtl, GET_MODE (x), x,
5714 rtx_addr_varies_p)));
5717 /* If we reach here, it is safe. */
5718 return 1;
5721 /* Subroutine of expand_expr: return rtx if EXP is a
5722 variable or parameter; else return 0. */
5724 static rtx
5725 var_rtx (exp)
5726 tree exp;
5728 STRIP_NOPS (exp);
5729 switch (TREE_CODE (exp))
5731 case PARM_DECL:
5732 case VAR_DECL:
5733 return DECL_RTL (exp);
5734 default:
5735 return 0;
5739 #ifdef MAX_INTEGER_COMPUTATION_MODE
5741 void
5742 check_max_integer_computation_mode (exp)
5743 tree exp;
5745 enum tree_code code;
5746 enum machine_mode mode;
5748 /* Strip any NOPs that don't change the mode. */
5749 STRIP_NOPS (exp);
5750 code = TREE_CODE (exp);
5752 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5753 if (code == NOP_EXPR
5754 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5755 return;
5757 /* First check the type of the overall operation. We need only look at
5758 unary, binary and relational operations. */
5759 if (TREE_CODE_CLASS (code) == '1'
5760 || TREE_CODE_CLASS (code) == '2'
5761 || TREE_CODE_CLASS (code) == '<')
5763 mode = TYPE_MODE (TREE_TYPE (exp));
5764 if (GET_MODE_CLASS (mode) == MODE_INT
5765 && mode > MAX_INTEGER_COMPUTATION_MODE)
5766 internal_error ("unsupported wide integer operation");
5769 /* Check operand of a unary op. */
5770 if (TREE_CODE_CLASS (code) == '1')
5772 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5773 if (GET_MODE_CLASS (mode) == MODE_INT
5774 && mode > MAX_INTEGER_COMPUTATION_MODE)
5775 internal_error ("unsupported wide integer operation");
5778 /* Check operands of a binary/comparison op. */
5779 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5781 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5782 if (GET_MODE_CLASS (mode) == MODE_INT
5783 && mode > MAX_INTEGER_COMPUTATION_MODE)
5784 internal_error ("unsupported wide integer operation");
5786 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5787 if (GET_MODE_CLASS (mode) == MODE_INT
5788 && mode > MAX_INTEGER_COMPUTATION_MODE)
5789 internal_error ("unsupported wide integer operation");
5792 #endif
5794 /* Return the highest power of two that EXP is known to be a multiple of.
5795 This is used in updating alignment of MEMs in array references. */
5797 static HOST_WIDE_INT
5798 highest_pow2_factor (exp)
5799 tree exp;
5801 HOST_WIDE_INT c0, c1;
5803 switch (TREE_CODE (exp))
5805 case INTEGER_CST:
5806 /* If the integer is expressible in a HOST_WIDE_INT, we can find the
5807 lowest bit that's a one. If the result is zero, return
5808 BIGGEST_ALIGNMENT. We need to handle this case since we can find it
5809 in a COND_EXPR, a MIN_EXPR, or a MAX_EXPR. If the constant overflows,
5810 we have an erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5811 later ICE. */
5812 if (TREE_CONSTANT_OVERFLOW (exp)
5813 || integer_zerop (exp))
5814 return BIGGEST_ALIGNMENT;
5815 else if (host_integerp (exp, 0))
5817 c0 = tree_low_cst (exp, 0);
5818 c0 = c0 < 0 ? - c0 : c0;
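/* C0 & -C0 isolates the lowest set bit of C0, which is the largest
   power of 2 that divides it. */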
5819 return c0 & -c0;
5821 break;
5823 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
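/* A sum or difference is divisible by the smaller of the operands'
   known power-of-2 factors; MIN and MAX yield one of the operands,
   so the same bound is safe for them as well. */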
5824 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5825 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5826 return MIN (c0, c1);
5828 case MULT_EXPR:
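/* A multiple of C0 times a multiple of C1 is a multiple of C0 * C1. */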
5829 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5830 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5831 return c0 * c1;
5833 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5834 case CEIL_DIV_EXPR:
5835 if (integer_pow2p (TREE_OPERAND (exp, 1))
5836 && host_integerp (TREE_OPERAND (exp, 1), 1))
5838 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5839 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5840 return MAX (1, c0 / c1);
5842 break;
5844 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5845 case SAVE_EXPR: case WITH_RECORD_EXPR:
5846 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5848 case COMPOUND_EXPR:
5849 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5851 case COND_EXPR:
5852 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5853 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5854 return MIN (c0, c1);
5856 default:
5857 break;
5860 return 1;
5863 /* Return an object on the placeholder list that matches EXP, a
5864 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5865 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5866 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5867 is a location which initially points to a starting location in the
5868 placeholder list (zero means start of the list) and where a pointer into
5869 the placeholder list at which the object is found is placed. */
5871 tree
5872 find_placeholder (exp, plist)
5873 tree exp;
5874 tree *plist;
5876 tree type = TREE_TYPE (exp);
5877 tree placeholder_expr;
5879 for (placeholder_expr
5880 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5881 placeholder_expr != 0;
5882 placeholder_expr = TREE_CHAIN (placeholder_expr))
5884 tree need_type = TYPE_MAIN_VARIANT (type);
5885 tree elt;
5887 /* Find the outermost reference that is of the type we want. If none,
5888 see if any object has a type that is a pointer to the type we
5889 want. */
5890 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5891 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5892 || TREE_CODE (elt) == COND_EXPR)
5893 ? TREE_OPERAND (elt, 1)
5894 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5895 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5896 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5897 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5898 ? TREE_OPERAND (elt, 0) : 0))
5899 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5901 if (plist)
5902 *plist = placeholder_expr;
5903 return elt;
5906 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5907 elt
5908 = ((TREE_CODE (elt) == COMPOUND_EXPR
5909 || TREE_CODE (elt) == COND_EXPR)
5910 ? TREE_OPERAND (elt, 1)
5911 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5912 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5913 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5914 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5915 ? TREE_OPERAND (elt, 0) : 0))
5916 if (POINTER_TYPE_P (TREE_TYPE (elt))
5917 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5918 == need_type))
5920 if (plist)
5921 *plist = placeholder_expr;
5922 return build1 (INDIRECT_REF, need_type, elt);
5926 return 0;
5929 /* expand_expr: generate code for computing expression EXP.
5930 An rtx for the computed value is returned. The value is never null.
5931 In the case of a void EXP, const0_rtx is returned.
5933 The value may be stored in TARGET if TARGET is nonzero.
5934 TARGET is just a suggestion; callers must assume that
5935 the rtx returned may not be the same as TARGET.
5937 If TARGET is CONST0_RTX, it means that the value will be ignored.
5939 If TMODE is not VOIDmode, it suggests generating the
5940 result in mode TMODE. But this is done only when convenient.
5941 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5942 TMODE is just a suggestion; callers must assume that
5943 the rtx returned may not have mode TMODE.
5945 Note that TARGET may have neither TMODE nor MODE. In that case, it
5946 probably will not be used.
5948 If MODIFIER is EXPAND_SUM then when EXP is an addition
5949 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5950 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5951 products as above, or REG or MEM, or constant.
5952 Ordinarily in such cases we would output mul or add instructions
5953 and then return a pseudo reg containing the sum.
5955 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5956 it also marks a label as absolutely required (it can't be dead).
5957 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5958 This is used for outputting expressions used in initializers.
5960 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5961 with a constant address even if that address is not normally legitimate.
5962 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5964 rtx
5965 expand_expr (exp, target, tmode, modifier)
5966 tree exp;
5967 rtx target;
5968 enum machine_mode tmode;
5969 enum expand_modifier modifier;
5971 rtx op0, op1, temp;
5972 tree type = TREE_TYPE (exp);
5973 int unsignedp = TREE_UNSIGNED (type);
5974 enum machine_mode mode;
5975 enum tree_code code = TREE_CODE (exp);
5976 optab this_optab;
5977 rtx subtarget, original_target;
5978 int ignore;
5979 tree context;
5981 /* Handle ERROR_MARK before anybody tries to access its type. */
5982 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5984 op0 = CONST0_RTX (tmode);
5985 if (op0 != 0)
5986 return op0;
5987 return const0_rtx;
5990 mode = TYPE_MODE (type);
5991 /* Use subtarget as the target for operand 0 of a binary operation. */
5992 subtarget = get_subtarget (target);
5993 original_target = target;
5994 ignore = (target == const0_rtx
5995 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5996 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5997 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
5998 && TREE_CODE (type) == VOID_TYPE));
6000 /* If we are going to ignore this result, we need only do something
6001 if there is a side-effect somewhere in the expression. If there
6002 is, short-circuit the most common cases here. Note that we must
6003 not call expand_expr with anything but const0_rtx in case this
6004 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6006 if (ignore)
6008 if (! TREE_SIDE_EFFECTS (exp))
6009 return const0_rtx;
6011 /* Ensure we reference a volatile object even if value is ignored, but
6012 don't do this if all we are doing is taking its address. */
6013 if (TREE_THIS_VOLATILE (exp)
6014 && TREE_CODE (exp) != FUNCTION_DECL
6015 && mode != VOIDmode && mode != BLKmode
6016 && modifier != EXPAND_CONST_ADDRESS)
6018 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6019 if (GET_CODE (temp) == MEM)
6020 temp = copy_to_reg (temp);
6021 return const0_rtx;
6024 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6025 || code == INDIRECT_REF || code == BUFFER_REF)
6026 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6027 modifier);
6029 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6030 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6032 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6033 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6034 return const0_rtx;
6036 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6037 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6038 /* If the second operand has no side effects, just evaluate
6039 the first. */
6040 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6041 modifier);
6042 else if (code == BIT_FIELD_REF)
6044 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6045 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6046 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6047 return const0_rtx;
6050 target = 0;
6053 #ifdef MAX_INTEGER_COMPUTATION_MODE
6054 /* Only check stuff here if the mode we want is different from the mode
6055 of the expression; if it's the same, check_max_integer_computation_mode
6056 will handle it. Do we really need to check this stuff at all? */
6058 if (target
6059 && GET_MODE (target) != mode
6060 && TREE_CODE (exp) != INTEGER_CST
6061 && TREE_CODE (exp) != PARM_DECL
6062 && TREE_CODE (exp) != ARRAY_REF
6063 && TREE_CODE (exp) != ARRAY_RANGE_REF
6064 && TREE_CODE (exp) != COMPONENT_REF
6065 && TREE_CODE (exp) != BIT_FIELD_REF
6066 && TREE_CODE (exp) != INDIRECT_REF
6067 && TREE_CODE (exp) != CALL_EXPR
6068 && TREE_CODE (exp) != VAR_DECL
6069 && TREE_CODE (exp) != RTL_EXPR)
6071 enum machine_mode mode = GET_MODE (target);
6073 if (GET_MODE_CLASS (mode) == MODE_INT
6074 && mode > MAX_INTEGER_COMPUTATION_MODE)
6075 internal_error ("unsupported wide integer operation");
6078 if (tmode != mode
6079 && TREE_CODE (exp) != INTEGER_CST
6080 && TREE_CODE (exp) != PARM_DECL
6081 && TREE_CODE (exp) != ARRAY_REF
6082 && TREE_CODE (exp) != ARRAY_RANGE_REF
6083 && TREE_CODE (exp) != COMPONENT_REF
6084 && TREE_CODE (exp) != BIT_FIELD_REF
6085 && TREE_CODE (exp) != INDIRECT_REF
6086 && TREE_CODE (exp) != VAR_DECL
6087 && TREE_CODE (exp) != CALL_EXPR
6088 && TREE_CODE (exp) != RTL_EXPR
6089 && GET_MODE_CLASS (tmode) == MODE_INT
6090 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6091 internal_error ("unsupported wide integer operation");
6093 check_max_integer_computation_mode (exp);
6094 #endif
6096 /* If we will do cse, generate all results into pseudo registers
6097 since 1) that allows cse to find more things
6098 and 2) otherwise cse could produce an insn the machine
6099 cannot support. An exception is a CONSTRUCTOR into a multi-word
6100 MEM: storing directly into the MEM is much more likely to be efficient. */
6102 if (! cse_not_expected && mode != BLKmode && target
6103 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6104 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6105 target = subtarget;
6107 switch (code)
6109 case LABEL_DECL:
6111 tree function = decl_function_context (exp);
6112 /* Handle using a label in a containing function. */
6113 if (function != current_function_decl
6114 && function != inline_function_decl && function != 0)
6116 struct function *p = find_function_data (function);
6117 p->expr->x_forced_labels
6118 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6119 p->expr->x_forced_labels);
6121 else
6123 if (modifier == EXPAND_INITIALIZER)
6124 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6125 label_rtx (exp),
6126 forced_labels);
6129 temp = gen_rtx_MEM (FUNCTION_MODE,
6130 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6131 if (function != current_function_decl
6132 && function != inline_function_decl && function != 0)
6133 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6134 return temp;
6137 case PARM_DECL:
6138 if (DECL_RTL (exp) == 0)
6140 error_with_decl (exp, "prior parameter's size depends on `%s'");
6141 return CONST0_RTX (mode);
6144 /* ... fall through ... */
6146 case VAR_DECL:
6147 /* If a static var's type was incomplete when the decl was written,
6148 but the type is complete now, lay out the decl now. */
6149 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6150 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6152 rtx value = DECL_RTL_IF_SET (exp);
6154 layout_decl (exp, 0);
6156 /* If the RTL was already set, update its mode and memory
6157 attributes. */
6158 if (value != 0)
6160 PUT_MODE (value, DECL_MODE (exp));
6161 SET_DECL_RTL (exp, 0);
6162 set_mem_attributes (value, exp, 1);
6163 SET_DECL_RTL (exp, value);
6167 /* ... fall through ... */
6169 case FUNCTION_DECL:
6170 case RESULT_DECL:
6171 if (DECL_RTL (exp) == 0)
6172 abort ();
6174 /* Ensure the variable is marked as used even if it doesn't go through
6175 a parser. If it hasn't been used yet, write out an external
6176 definition. */
6177 if (! TREE_USED (exp))
6179 assemble_external (exp);
6180 TREE_USED (exp) = 1;
6183 /* Show we haven't gotten RTL for this yet. */
6184 temp = 0;
6186 /* Handle variables inherited from containing functions. */
6187 context = decl_function_context (exp);
6189 /* We treat inline_function_decl as an alias for the current function
6190 because that is the inline function whose vars, types, etc.
6191 are being merged into the current function.
6192 See expand_inline_function. */
6194 if (context != 0 && context != current_function_decl
6195 && context != inline_function_decl
6196 /* If var is static, we don't need a static chain to access it. */
6197 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6198 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6200 rtx addr;
6202 /* Mark as non-local and addressable. */
6203 DECL_NONLOCAL (exp) = 1;
6204 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6205 abort ();
6206 mark_addressable (exp);
6207 if (GET_CODE (DECL_RTL (exp)) != MEM)
6208 abort ();
6209 addr = XEXP (DECL_RTL (exp), 0);
6210 if (GET_CODE (addr) == MEM)
6211 addr
6212 = replace_equiv_address (addr,
6213 fix_lexical_addr (XEXP (addr, 0), exp));
6214 else
6215 addr = fix_lexical_addr (addr, exp);
6217 temp = replace_equiv_address (DECL_RTL (exp), addr);
6220 /* This is the case of an array whose size is to be determined
6221 from its initializer, while the initializer is still being parsed.
6222 See expand_decl. */
6224 else if (GET_CODE (DECL_RTL (exp)) == MEM
6225 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6226 temp = validize_mem (DECL_RTL (exp));
6228 /* If DECL_RTL is memory, we are in the normal case and either
6229 the address is not valid or it is not a register and -fforce-addr
6230 is specified, get the address into a register. */
6232 else if (GET_CODE (DECL_RTL (exp)) == MEM
6233 && modifier != EXPAND_CONST_ADDRESS
6234 && modifier != EXPAND_SUM
6235 && modifier != EXPAND_INITIALIZER
6236 && (! memory_address_p (DECL_MODE (exp),
6237 XEXP (DECL_RTL (exp), 0))
6238 || (flag_force_addr
6239 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6240 temp = replace_equiv_address (DECL_RTL (exp),
6241 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6243 /* If we got something, return it. But first, set the alignment
6244 if the address is a register. */
6245 if (temp != 0)
6247 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6248 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6250 return temp;
6253 /* If the mode of DECL_RTL does not match that of the decl, it
6254 must be a promoted value. We return a SUBREG of the wanted mode,
6255 but mark it so that we know that it was already extended. */
6257 if (GET_CODE (DECL_RTL (exp)) == REG
6258 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6260 /* Get the signedness used for this variable. Ensure we get the
6261 same mode we got when the variable was declared. */
6262 if (GET_MODE (DECL_RTL (exp))
6263 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6264 abort ();
6266 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6267 SUBREG_PROMOTED_VAR_P (temp) = 1;
6268 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6269 return temp;
6272 return DECL_RTL (exp);
6274 case INTEGER_CST:
6275 return immed_double_const (TREE_INT_CST_LOW (exp),
6276 TREE_INT_CST_HIGH (exp), mode);
6278 case CONST_DECL:
6279 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6281 case REAL_CST:
6282 /* If optimized, generate immediate CONST_DOUBLE
6283 which will be turned into memory by reload if necessary.
6285 We used to force a register so that loop.c could see it. But
6286 this does not allow gen_* patterns to perform optimizations with
6287 the constants. It also produces two insns in cases like "x = 1.0;".
6288 On most machines, floating-point constants are not permitted in
6289 many insns, so we'd end up copying it to a register in any case.
6291 Now, we do the copying in expand_binop, if appropriate. */
6292 return immed_real_const (exp);
6294 case COMPLEX_CST:
6295 case STRING_CST:
6296 if (! TREE_CST_RTL (exp))
6297 output_constant_def (exp, 1);
6299 /* TREE_CST_RTL probably contains a constant address.
6300 On RISC machines where a constant address isn't valid,
6301 make some insns to get that address into a register. */
6302 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6303 && modifier != EXPAND_CONST_ADDRESS
6304 && modifier != EXPAND_INITIALIZER
6305 && modifier != EXPAND_SUM
6306 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6307 || (flag_force_addr
6308 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6309 return replace_equiv_address (TREE_CST_RTL (exp),
6310 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6311 return TREE_CST_RTL (exp);
6313 case EXPR_WITH_FILE_LOCATION:
6315 rtx to_return;
6316 const char *saved_input_filename = input_filename;
6317 int saved_lineno = lineno;
6318 input_filename = EXPR_WFL_FILENAME (exp);
6319 lineno = EXPR_WFL_LINENO (exp);
6320 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6321 emit_line_note (input_filename, lineno);
6322 /* Possibly avoid switching back and forth here. */
6323 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6324 input_filename = saved_input_filename;
6325 lineno = saved_lineno;
6326 return to_return;
6329 case SAVE_EXPR:
6330 context = decl_function_context (exp);
6332 /* If this SAVE_EXPR was at global context, assume we are an
6333 initialization function and move it into our context. */
6334 if (context == 0)
6335 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6337 /* We treat inline_function_decl as an alias for the current function
6338 because that is the inline function whose vars, types, etc.
6339 are being merged into the current function.
6340 See expand_inline_function. */
6341 if (context == current_function_decl || context == inline_function_decl)
6342 context = 0;
6344 /* If this is non-local, handle it. */
6345 if (context)
6347 /* The following call just exists to abort if the context is
6348 not of a containing function. */
6349 find_function_data (context);
6351 temp = SAVE_EXPR_RTL (exp);
6352 if (temp && GET_CODE (temp) == REG)
6354 put_var_into_stack (exp);
6355 temp = SAVE_EXPR_RTL (exp);
6357 if (temp == 0 || GET_CODE (temp) != MEM)
6358 abort ();
6359 return
6360 replace_equiv_address (temp,
6361 fix_lexical_addr (XEXP (temp, 0), exp));
6363 if (SAVE_EXPR_RTL (exp) == 0)
6365 if (mode == VOIDmode)
6366 temp = const0_rtx;
6367 else
6368 temp = assign_temp (build_qualified_type (type,
6369 (TYPE_QUALS (type)
6370 | TYPE_QUAL_CONST)),
6371 3, 0, 0);
6373 SAVE_EXPR_RTL (exp) = temp;
6374 if (!optimize && GET_CODE (temp) == REG)
6375 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6376 save_expr_regs);
6378 /* If the mode of TEMP does not match that of the expression, it
6379 must be a promoted value. We pass store_expr a SUBREG of the
6380 wanted mode but mark it so that we know that it was already
6381 extended. Note that `unsignedp' was modified above in
6382 this case. */
6384 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6386 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6387 SUBREG_PROMOTED_VAR_P (temp) = 1;
6388 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6391 if (temp == const0_rtx)
6392 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6393 else
6394 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6396 TREE_USED (exp) = 1;
6399 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6400 must be a promoted value. We return a SUBREG of the wanted mode,
6401 but mark it so that we know that it was already extended. */
6403 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6404 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6406 /* Compute the signedness and make the proper SUBREG. */
6407 promote_mode (type, mode, &unsignedp, 0);
6408 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6409 SUBREG_PROMOTED_VAR_P (temp) = 1;
6410 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6411 return temp;
6414 return SAVE_EXPR_RTL (exp);
6416 case UNSAVE_EXPR:
6418 rtx temp;
6419 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
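/* Reset the once-only nodes (such as SAVE_EXPRs) inside the operand so
   that it can be expanded again later. */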
6420 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6421 return temp;
6424 case PLACEHOLDER_EXPR:
6426 tree old_list = placeholder_list;
6427 tree placeholder_expr = 0;
6429 exp = find_placeholder (exp, &placeholder_expr);
6430 if (exp == 0)
6431 abort ();
6433 placeholder_list = TREE_CHAIN (placeholder_expr);
6434 temp = expand_expr (exp, original_target, tmode, modifier);
6435 placeholder_list = old_list;
6436 return temp;
6439 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6440 abort ();
6442 case WITH_RECORD_EXPR:
6443 /* Put the object on the placeholder list, expand our first operand,
6444 and pop the list. */
6445 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6446 placeholder_list);
6447 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6448 modifier);
6449 placeholder_list = TREE_CHAIN (placeholder_list);
6450 return target;
6452 case GOTO_EXPR:
6453 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6454 expand_goto (TREE_OPERAND (exp, 0));
6455 else
6456 expand_computed_goto (TREE_OPERAND (exp, 0));
6457 return const0_rtx;
6459 case EXIT_EXPR:
6460 expand_exit_loop_if_false (NULL,
6461 invert_truthvalue (TREE_OPERAND (exp, 0)));
6462 return const0_rtx;
6464 case LABELED_BLOCK_EXPR:
6465 if (LABELED_BLOCK_BODY (exp))
6466 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6467 /* Should perhaps use expand_label, but this is simpler and safer. */
6468 do_pending_stack_adjust ();
6469 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6470 return const0_rtx;
6472 case EXIT_BLOCK_EXPR:
6473 if (EXIT_BLOCK_RETURN (exp))
6474 sorry ("returned value in block_exit_expr");
6475 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6476 return const0_rtx;
6478 case LOOP_EXPR:
6479 push_temp_slots ();
6480 expand_start_loop (1);
6481 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6482 expand_end_loop ();
6483 pop_temp_slots ();
6485 return const0_rtx;
6487 case BIND_EXPR:
6489 tree vars = TREE_OPERAND (exp, 0);
6490 int vars_need_expansion = 0;
6492 /* Need to open a binding contour here because
6493 if there are any cleanups they must be contained here. */
6494 expand_start_bindings (2);
6496 /* Mark the corresponding BLOCK for output in its proper place. */
6497 if (TREE_OPERAND (exp, 2) != 0
6498 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6499 insert_block (TREE_OPERAND (exp, 2));
6501 /* If VARS have not yet been expanded, expand them now. */
6502 while (vars)
6504 if (!DECL_RTL_SET_P (vars))
6506 vars_need_expansion = 1;
6507 expand_decl (vars);
6509 expand_decl_init (vars);
6510 vars = TREE_CHAIN (vars);
6513 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6515 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6517 return temp;
6520 case RTL_EXPR:
6521 if (RTL_EXPR_SEQUENCE (exp))
6523 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6524 abort ();
6525 emit_insns (RTL_EXPR_SEQUENCE (exp));
6526 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6528 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6529 free_temps_for_rtl_expr (exp);
6530 return RTL_EXPR_RTL (exp);
6532 case CONSTRUCTOR:
6533 /* If we don't need the result, just ensure we evaluate any
6534 subexpressions. */
6535 if (ignore)
6537 tree elt;
6539 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6540 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6542 return const0_rtx;
6545 /* All elts simple constants => refer to a constant in memory. But
6546 if this is a non-BLKmode mode, let it store a field at a time
6547 since that should make a CONST_INT or CONST_DOUBLE when we
6548 fold. Likewise, if we have a target we can use, it is best to
6549 store directly into the target unless the type is large enough
6550 that memcpy will be used. If we are making an initializer and
6551 all operands are constant, put it in memory as well. */
6552 else if ((TREE_STATIC (exp)
6553 && ((mode == BLKmode
6554 && ! (target != 0 && safe_from_p (target, exp, 1)))
6555 || TREE_ADDRESSABLE (exp)
6556 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6557 && (! MOVE_BY_PIECES_P
6558 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6559 TYPE_ALIGN (type)))
6560 && ! mostly_zeros_p (exp))))
6561 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6563 rtx constructor = output_constant_def (exp, 1);
6565 if (modifier != EXPAND_CONST_ADDRESS
6566 && modifier != EXPAND_INITIALIZER
6567 && modifier != EXPAND_SUM)
6568 constructor = validize_mem (constructor);
6570 return constructor;
6572 else
6574 /* Handle calls that pass values in multiple non-contiguous
6575 locations. The Irix 6 ABI has examples of this. */
6576 if (target == 0 || ! safe_from_p (target, exp, 1)
6577 || GET_CODE (target) == PARALLEL)
6578 target
6579 = assign_temp (build_qualified_type (type,
6580 (TYPE_QUALS (type)
6581 | (TREE_READONLY (exp)
6582 * TYPE_QUAL_CONST))),
6583 0, TREE_ADDRESSABLE (exp), 1);
6585 store_constructor (exp, target, 0,
6586 int_size_in_bytes (TREE_TYPE (exp)));
6587 return target;
6590 case INDIRECT_REF:
6592 tree exp1 = TREE_OPERAND (exp, 0);
6593 tree index;
6594 tree string = string_constant (exp1, &index);
6596 /* Try to optimize reads from const strings. */
6597 if (string
6598 && TREE_CODE (string) == STRING_CST
6599 && TREE_CODE (index) == INTEGER_CST
6600 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6601 && GET_MODE_CLASS (mode) == MODE_INT
6602 && GET_MODE_SIZE (mode) == 1
6603 && modifier != EXPAND_WRITE)
6604 return
6605 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6607 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6608 op0 = memory_address (mode, op0);
6609 temp = gen_rtx_MEM (mode, op0);
6610 set_mem_attributes (temp, exp, 0);
6612 /* If we are writing to this object and its type is a record with
6613 readonly fields, we must mark it as readonly so it will
6614 conflict with readonly references to those fields. */
6615 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6616 RTX_UNCHANGING_P (temp) = 1;
6618 return temp;
6621 case ARRAY_REF:
6622 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6623 abort ();
6626 tree array = TREE_OPERAND (exp, 0);
6627 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6628 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6629 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6630 HOST_WIDE_INT i;
6632 /* Optimize the special-case of a zero lower bound.
6634 We convert the low_bound to sizetype to avoid some problems
6635 with constant folding. (E.g. suppose the lower bound is 1,
6636 and its mode is QI. Without the conversion, (ARRAY
6637 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6638 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6640 if (! integer_zerop (low_bound))
6641 index = size_diffop (index, convert (sizetype, low_bound));
6643 /* Fold an expression like: "foo"[2].
6644 This is not done in fold so it won't happen inside &.
6645 Don't fold if this is for wide characters since it's too
6646 difficult to do correctly and this is a very rare case. */
6648 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6649 && TREE_CODE (array) == STRING_CST
6650 && TREE_CODE (index) == INTEGER_CST
6651 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6652 && GET_MODE_CLASS (mode) == MODE_INT
6653 && GET_MODE_SIZE (mode) == 1)
6654 return
6655 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6657 /* If this is a constant index into a constant array,
6658 just get the value from the array. Handle both the cases when
6659 we have an explicit constructor and when our operand is a variable
6660 that was declared const. */
6662 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6663 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6664 && TREE_CODE (index) == INTEGER_CST
6665 && 0 > compare_tree_int (index,
6666 list_length (CONSTRUCTOR_ELTS
6667 (TREE_OPERAND (exp, 0)))))
6669 tree elem;
6671 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6672 i = TREE_INT_CST_LOW (index);
6673 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6674 ;
6676 if (elem)
6677 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6678 modifier);
6681 else if (optimize >= 1
6682 && modifier != EXPAND_CONST_ADDRESS
6683 && modifier != EXPAND_INITIALIZER
6684 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6685 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6686 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6688 if (TREE_CODE (index) == INTEGER_CST)
6690 tree init = DECL_INITIAL (array);
6692 if (TREE_CODE (init) == CONSTRUCTOR)
6694 tree elem;
6696 for (elem = CONSTRUCTOR_ELTS (init);
6697 (elem
6698 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6699 elem = TREE_CHAIN (elem))
6700 ;
6702 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6703 return expand_expr (fold (TREE_VALUE (elem)), target,
6704 tmode, modifier);
6706 else if (TREE_CODE (init) == STRING_CST
6707 && 0 > compare_tree_int (index,
6708 TREE_STRING_LENGTH (init)))
6710 tree type = TREE_TYPE (TREE_TYPE (init));
6711 enum machine_mode mode = TYPE_MODE (type);
6713 if (GET_MODE_CLASS (mode) == MODE_INT
6714 && GET_MODE_SIZE (mode) == 1)
6715 return (GEN_INT
6716 (TREE_STRING_POINTER
6717 (init)[TREE_INT_CST_LOW (index)]));
6722 /* Fall through. */
6724 case COMPONENT_REF:
6725 case BIT_FIELD_REF:
6726 case ARRAY_RANGE_REF:
6727 /* If the operand is a CONSTRUCTOR, we can just extract the
6728 appropriate field if it is present. Don't do this if we have
6729 already written the data since we want to refer to that copy
6730 and varasm.c assumes that's what we'll do. */
6731 if (code == COMPONENT_REF
6732 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6733 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6735 tree elt;
6737 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6738 elt = TREE_CHAIN (elt))
6739 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6740 /* We can normally use the value of the field in the
6741 CONSTRUCTOR. However, if this is a bitfield in
6742 an integral mode that we can fit in a HOST_WIDE_INT,
6743 we must mask only the number of bits in the bitfield,
6744 since this is done implicitly by the constructor. If
6745 the bitfield does not meet either of those conditions,
6746 we can't do this optimization. */
6747 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6748 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6749 == MODE_INT)
6750 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6751 <= HOST_BITS_PER_WIDE_INT))))
6753 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6754 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6756 HOST_WIDE_INT bitsize
6757 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6759 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6761 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6762 op0 = expand_and (op0, op1, target);
6764 else
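/* The field is signed: shift it to the top of IMODE, then
   arithmetic-shift it back down to sign-extend the BITSIZE low bits. */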
6766 enum machine_mode imode
6767 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6768 tree count
6769 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6770 0);
6772 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6773 target, 0);
6774 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6775 target, 0);
6779 return op0;
6784 enum machine_mode mode1;
6785 HOST_WIDE_INT bitsize, bitpos;
6786 tree offset;
6787 int volatilep = 0;
6788 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6789 &mode1, &unsignedp, &volatilep);
6790 rtx orig_op0;
6792 /* If we got back the original object, something is wrong. Perhaps
6793 we are evaluating an expression too early. In any event, don't
6794 infinitely recurse. */
6795 if (tem == exp)
6796 abort ();
6798 /* If TEM's type is a union of variable size, pass TARGET to the inner
6799 computation, since it will need a temporary and TARGET is known
6800 to suffice. This occurs in unchecked conversion in Ada. */
6802 orig_op0 = op0
6803 = expand_expr (tem,
6804 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6805 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6806 != INTEGER_CST)
6807 ? target : NULL_RTX),
6808 VOIDmode,
6809 (modifier == EXPAND_INITIALIZER
6810 || modifier == EXPAND_CONST_ADDRESS)
6811 ? modifier : EXPAND_NORMAL);
6813 /* If this is a constant, put it into a register if it is a
6814 legitimate constant and OFFSET is 0 and memory if it isn't. */
6815 if (CONSTANT_P (op0))
6817 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6818 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6819 && offset == 0)
6820 op0 = force_reg (mode, op0);
6821 else
6822 op0 = validize_mem (force_const_mem (mode, op0));
6825 if (offset != 0)
6827 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6829 /* If this object is in a register, put it into memory.
6830 This case can't occur in C, but can in Ada if we have
6831 unchecked conversion of an expression from a scalar type to
6832 an array or record type. */
6833 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6834 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6836 /* If the operand is a SAVE_EXPR, we can deal with this by
6837 forcing the SAVE_EXPR into memory. */
6838 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6840 put_var_into_stack (TREE_OPERAND (exp, 0));
6841 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6843 else
6845 tree nt
6846 = build_qualified_type (TREE_TYPE (tem),
6847 (TYPE_QUALS (TREE_TYPE (tem))
6848 | TYPE_QUAL_CONST));
6849 rtx memloc = assign_temp (nt, 1, 1, 1);
6851 emit_move_insn (memloc, op0);
6852 op0 = memloc;
6856 if (GET_CODE (op0) != MEM)
6857 abort ();
6859 if (GET_MODE (offset_rtx) != ptr_mode)
6860 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6862 #ifdef POINTERS_EXTEND_UNSIGNED
6863 if (GET_MODE (offset_rtx) != Pmode)
6864 offset_rtx = convert_memory_address (Pmode, offset_rtx);
6865 #endif
6867 /* A constant address in OP0 can have VOIDmode, we must not try
6868 to call force_reg for that case. Avoid that case. */
6869 if (GET_CODE (op0) == MEM
6870 && GET_MODE (op0) == BLKmode
6871 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6872 && bitsize != 0
6873 && (bitpos % bitsize) == 0
6874 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6875 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6877 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6879 if (GET_CODE (XEXP (temp, 0)) == REG)
6880 op0 = temp;
6881 else
6882 op0 = (replace_equiv_address
6883 (op0,
6884 force_reg (GET_MODE (XEXP (temp, 0)),
6885 XEXP (temp, 0))));
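/* The byte offset has been folded into the address by adjust_address,
   so no bit offset remains. */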
6886 bitpos = 0;
6889 op0 = offset_address (op0, offset_rtx,
6890 highest_pow2_factor (offset));
6893 /* Don't forget about volatility even if this is a bitfield. */
6894 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6896 if (op0 == orig_op0)
6897 op0 = copy_rtx (op0);
6899 MEM_VOLATILE_P (op0) = 1;
6902 /* In cases where an aligned union has an unaligned object
6903 as a field, we might be extracting a BLKmode value from
6904 an integer-mode (e.g., SImode) object. Handle this case
6905 by doing the extract into an object as wide as the field
6906 (which we know to be the width of a basic mode), then
6907 storing into memory, and changing the mode to BLKmode. */
6908 if (mode1 == VOIDmode
6909 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6910 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6911 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6912 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6913 && modifier != EXPAND_CONST_ADDRESS
6914 && modifier != EXPAND_INITIALIZER)
6915 /* If the field isn't aligned enough to fetch as a memref,
6916 fetch it as a bit field. */
6917 || (mode1 != BLKmode
6918 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6919 && ((TYPE_ALIGN (TREE_TYPE (tem))
6920 < GET_MODE_ALIGNMENT (mode))
6921 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6922 /* If the type and the field are a constant size and the
6923 size of the type isn't the same size as the bitfield,
6924 we must use bitfield operations. */
6925 || (bitsize >= 0
6926 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6927 == INTEGER_CST)
6928 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6929 bitsize)))
6931 enum machine_mode ext_mode = mode;
6933 if (ext_mode == BLKmode
6934 && ! (target != 0 && GET_CODE (op0) == MEM
6935 && GET_CODE (target) == MEM
6936 && bitpos % BITS_PER_UNIT == 0))
6937 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6939 if (ext_mode == BLKmode)
6941 /* In this case, BITPOS must start at a byte boundary and
6942 TARGET, if specified, must be a MEM. */
6943 if (GET_CODE (op0) != MEM
6944 || (target != 0 && GET_CODE (target) != MEM)
6945 || bitpos % BITS_PER_UNIT != 0)
6946 abort ();
6948 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
6949 if (target == 0)
6950 target = assign_temp (type, 0, 1, 1);
6952 emit_block_move (target, op0,
6953 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6954 / BITS_PER_UNIT));
6956 return target;
6959 op0 = validize_mem (op0);
6961 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6962 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6964 op0 = extract_bit_field (op0, bitsize, bitpos,
6965 unsignedp, target, ext_mode, ext_mode,
6966 int_size_in_bytes (TREE_TYPE (tem)));
6968 /* If the result is a record type and BITSIZE is narrower than
6969 the mode of OP0, an integral mode, and this is a big endian
6970 machine, we must put the field into the high-order bits. */
6971 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6972 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6973 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6974 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6975 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6976 - bitsize),
6977 op0, 1);
6979 if (mode == BLKmode)
6981 rtx new = assign_temp (build_qualified_type
6982 (type_for_mode (ext_mode, 0),
6983 TYPE_QUAL_CONST), 0, 1, 1);
6985 emit_move_insn (new, op0);
6986 op0 = copy_rtx (new);
6987 PUT_MODE (op0, BLKmode);
6988 set_mem_attributes (op0, exp, 1);
6991 return op0;
6994 /* If the result is BLKmode, use that to access the object
6995 now as well. */
6996 if (mode == BLKmode)
6997 mode1 = BLKmode;
6999 /* Get a reference to just this component. */
7000 if (modifier == EXPAND_CONST_ADDRESS
7001 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7002 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7003 else
7004 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7006 if (op0 == orig_op0)
7007 op0 = copy_rtx (op0);
7009 set_mem_attributes (op0, exp, 0);
7010 if (GET_CODE (XEXP (op0, 0)) == REG)
7011 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7013 MEM_VOLATILE_P (op0) |= volatilep;
7014 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7015 || modifier == EXPAND_CONST_ADDRESS
7016 || modifier == EXPAND_INITIALIZER)
7017 return op0;
7018 else if (target == 0)
7019 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7021 convert_move (target, op0, unsignedp);
7022 return target;
7025 case VTABLE_REF:
7027 rtx insn, before = get_last_insn (), vtbl_ref;
7029 /* Evaluate the interior expression. */
7030 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7031 tmode, modifier);
7033 /* Get or create an instruction off which to hang a note. */
7034 if (REG_P (subtarget))
7036 target = subtarget;
7037 insn = get_last_insn ();
7038 if (insn == before)
7039 abort ();
7040 if (! INSN_P (insn))
7041 insn = prev_nonnote_insn (insn);
7043 else
7045 target = gen_reg_rtx (GET_MODE (subtarget));
7046 insn = emit_move_insn (target, subtarget);
7049 /* Collect the data for the note. */
7050 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7051 vtbl_ref = plus_constant (vtbl_ref,
7052 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7053 /* Discard the initial CONST that was added. */
7054 vtbl_ref = XEXP (vtbl_ref, 0);
7056 REG_NOTES (insn)
7057 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7059 return target;
7062 /* Intended for a reference to a buffer of a file-object in Pascal.
7063 But it's not certain that a special tree code will really be
7064 necessary for these. INDIRECT_REF might work for them. */
7065 case BUFFER_REF:
7066 abort ();
7068 case IN_EXPR:
7070 /* Pascal set IN expression.
7072 Algorithm:
7073 rlo = set_low - (set_low%bits_per_word);
7074 the_word = set [ (index - rlo)/bits_per_word ];
7075 bit_index = index % bits_per_word;
7076 bitmask = 1 << bit_index;
7077 return !!(the_word & bitmask); */
7079 tree set = TREE_OPERAND (exp, 0);
7080 tree index = TREE_OPERAND (exp, 1);
7081 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7082 tree set_type = TREE_TYPE (set);
7083 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7084 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7085 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7086 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7087 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7088 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7089 rtx setaddr = XEXP (setval, 0);
7090 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7091 rtx rlow;
7092 rtx diff, quo, rem, addr, bit, result;
7094 /* If domain is empty, answer is no. Likewise if index is constant
7095 and out of bounds. */
7096 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7097 && TREE_CODE (set_low_bound) == INTEGER_CST
7098 && tree_int_cst_lt (set_high_bound, set_low_bound))
7099 || (TREE_CODE (index) == INTEGER_CST
7100 && TREE_CODE (set_low_bound) == INTEGER_CST
7101 && tree_int_cst_lt (index, set_low_bound))
7102 || (TREE_CODE (set_high_bound) == INTEGER_CST
7103 && TREE_CODE (index) == INTEGER_CST
7104 && tree_int_cst_lt (set_high_bound, index))))
7105 return const0_rtx;
7107 if (target == 0)
7108 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7110 /* If we get here, we have to generate the code for both cases
7111 (in range and out of range). */
7113 op0 = gen_label_rtx ();
7114 op1 = gen_label_rtx ();
7116 if (! (GET_CODE (index_val) == CONST_INT
7117 && GET_CODE (lo_r) == CONST_INT))
7118 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7119 GET_MODE (index_val), iunsignedp, op1);
7121 if (! (GET_CODE (index_val) == CONST_INT
7122 && GET_CODE (hi_r) == CONST_INT))
7123 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7124 GET_MODE (index_val), iunsignedp, op1);
7126 /* Calculate the element number of bit zero in the first word
7127 of the set. */
7128 if (GET_CODE (lo_r) == CONST_INT)
7129 rlow = GEN_INT (INTVAL (lo_r)
7130 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7131 else
7132 rlow = expand_binop (index_mode, and_optab, lo_r,
7133 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7134 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7136 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7137 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7139 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7140 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7141 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7142 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7144 addr = memory_address (byte_mode,
7145 expand_binop (index_mode, add_optab, diff,
7146 setaddr, NULL_RTX, iunsignedp,
7147 OPTAB_LIB_WIDEN));
7149 /* Extract the bit we want to examine. */
7150 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7151 gen_rtx_MEM (byte_mode, addr),
7152 make_tree (TREE_TYPE (index), rem),
7153 NULL_RTX, 1);
7154 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7155 GET_MODE (target) == byte_mode ? target : 0,
7156 1, OPTAB_LIB_WIDEN);
7158 if (result != target)
7159 convert_move (target, result, 1);
7161 /* Output the code to handle the out-of-range case. */
7162 emit_jump (op0);
7163 emit_label (op1);
7164 emit_move_insn (target, const0_rtx);
7165 emit_label (op0);
7166 return target;
7169 case WITH_CLEANUP_EXPR:
7170 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7172 WITH_CLEANUP_EXPR_RTL (exp)
7173 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7174 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7176 /* That's it for this cleanup. */
7177 TREE_OPERAND (exp, 1) = 0;
7179 return WITH_CLEANUP_EXPR_RTL (exp);
7181 case CLEANUP_POINT_EXPR:
7183 /* Start a new binding layer that will keep track of all cleanup
7184 actions to be performed. */
7185 expand_start_bindings (2);
7187 target_temp_slot_level = temp_slot_level;
7189 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7190 /* If we're going to use this value, load it up now. */
7191 if (! ignore)
7192 op0 = force_not_mem (op0);
7193 preserve_temp_slots (op0);
7194 expand_end_bindings (NULL_TREE, 0, 0);
7196 return op0;
7198 case CALL_EXPR:
7199 /* Check for a built-in function. */
7200 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7201 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7202 == FUNCTION_DECL)
7203 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7205 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7206 == BUILT_IN_FRONTEND)
7207 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7208 else
7209 return expand_builtin (exp, target, subtarget, tmode, ignore);
7212 return expand_call (exp, target, ignore);
7214 case NON_LVALUE_EXPR:
7215 case NOP_EXPR:
7216 case CONVERT_EXPR:
7217 case REFERENCE_EXPR:
7218 if (TREE_OPERAND (exp, 0) == error_mark_node)
7219 return const0_rtx;
7221 if (TREE_CODE (type) == UNION_TYPE)
7223 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7225 /* If both input and output are BLKmode, this conversion isn't doing
7226 anything except possibly changing memory attributes.
7227 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7229 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7230 modifier);
7232 result = copy_rtx (result);
7233 set_mem_attributes (result, exp, 0);
7234 return result;
7237 if (target == 0)
7238 target = assign_temp (type, 0, 1, 1);
7240 if (GET_CODE (target) == MEM)
7241 /* Store data into beginning of memory target. */
7242 store_expr (TREE_OPERAND (exp, 0),
7243 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7245 else if (GET_CODE (target) == REG)
7246 /* Store this field into a union of the proper type. */
7247 store_field (target,
7248 MIN ((int_size_in_bytes (TREE_TYPE
7249 (TREE_OPERAND (exp, 0)))
7250 * BITS_PER_UNIT),
7251 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7252 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7253 VOIDmode, 0, type, 0);
7254 else
7255 abort ();
7257 /* Return the entire union. */
7258 return target;
7261 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7263 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7264 modifier);
7266 /* If the signedness of the conversion differs and OP0 is
7267 a promoted SUBREG, clear that indication since we now
7268 have to do the proper extension. */
7269 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7270 && GET_CODE (op0) == SUBREG)
7271 SUBREG_PROMOTED_VAR_P (op0) = 0;
7273 return op0;
7276 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7277 if (GET_MODE (op0) == mode)
7278 return op0;
7280 /* If OP0 is a constant, just convert it into the proper mode. */
7281 if (CONSTANT_P (op0))
7282 return
7283 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7284 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7286 if (modifier == EXPAND_INITIALIZER)
7287 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7289 if (target == 0)
7290 return
7291 convert_to_mode (mode, op0,
7292 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7293 else
7294 convert_move (target, op0,
7295 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7296 return target;
7298 case VIEW_CONVERT_EXPR:
7299 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7301 /* If the input and output modes are both the same, we are done.
7302 Otherwise, if neither mode is BLKmode and both are within a word, we
7303 can use gen_lowpart. If neither is true, make sure the operand is
7304 in memory and convert the MEM to the new mode. */
7305 if (TYPE_MODE (type) == GET_MODE (op0))
7307 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7308 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7309 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7310 op0 = gen_lowpart (TYPE_MODE (type), op0);
7311 else if (GET_CODE (op0) != MEM)
7313 /* If the operand is not a MEM, force it into memory. Since we
7314 are going to be changing the mode of the MEM, don't call
7315 force_const_mem for constants because we don't allow pool
7316 constants to change mode. */
7317 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7319 if (TREE_ADDRESSABLE (exp))
7320 abort ();
7322 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7323 target
7324 = assign_stack_temp_for_type
7325 (TYPE_MODE (inner_type),
7326 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7328 emit_move_insn (target, op0);
7329 op0 = target;
7332 /* At this point, OP0 is in the correct mode. If the output type is such
7333 that the operand is known to be aligned, indicate that it is.
7334 Otherwise, we need only be concerned about alignment for non-BLKmode
7335 results. */
7336 if (GET_CODE (op0) == MEM)
7338 op0 = copy_rtx (op0);
7340 if (TYPE_ALIGN_OK (type))
7341 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7342 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7343 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7345 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7346 HOST_WIDE_INT temp_size
7347 = MAX (int_size_in_bytes (inner_type),
7348 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7349 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7350 temp_size, 0, type);
7351 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7353 if (TREE_ADDRESSABLE (exp))
7354 abort ();
7356 if (GET_MODE (op0) == BLKmode)
7357 emit_block_move (new_with_op0_mode, op0,
7358 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7359 else
7360 emit_move_insn (new_with_op0_mode, op0);
7362 op0 = new;
7365 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7368 return op0;
7370 case PLUS_EXPR:
7371 /* We come here from MINUS_EXPR when the second operand is a
7372 constant. */
7373 plus_expr:
7374 this_optab = ! unsignedp && flag_trapv
7375 && (GET_MODE_CLASS (mode) == MODE_INT)
7376 ? addv_optab : add_optab;
7378 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7379 something else, make sure we add the register to the constant and
7380 then to the other thing. This case can occur during strength
7381 reduction and doing it this way will produce better code if the
7382 frame pointer or argument pointer is eliminated.
7384 fold-const.c will ensure that the constant is always in the inner
7385 PLUS_EXPR, so the only case we need to do anything about is if
7386 sp, ap, or fp is our second argument, in which case we must swap
7387 the innermost first argument and our second argument. */
7389 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7390 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7391 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7392 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7393 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7394 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7396 tree t = TREE_OPERAND (exp, 1);
7398 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7399 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7402 /* If the result is to be ptr_mode and we are adding an integer to
7403 something, we might be forming a constant. So try to use
7404 plus_constant. If it produces a sum and we can't accept it,
7405 use force_operand. This allows P = &ARR[const] to generate
7406 efficient code on machines where a SYMBOL_REF is not a valid
7407 address.
7409 If this is an EXPAND_SUM call, always return the sum. */
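/* Illustrative sketch (hypothetical identifiers): for P = &ARR[3] with
4-byte elements, the address operand expands under EXPAND_SUM to
(symbol_ref "ARR") and plus_constant folds in the byte offset, giving
something like (const (plus (symbol_ref "ARR") (const_int 12))) rather
than a runtime addition.  */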
7410 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7411 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7413 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7414 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7415 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7417 rtx constant_part;
7419 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7420 EXPAND_SUM);
7421 /* Use immed_double_const to ensure that the constant is
7422 truncated according to the mode of OP1, then sign extended
7423 to a HOST_WIDE_INT. Using the constant directly can result
7424 in non-canonical RTL in a 64x32 cross compile. */
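/* Illustrative sketch: in a 64x32 cross compile, the INTEGER_CST for -1
has a low word of 0xffffffff; using that directly would produce the
non-canonical (const_int 0xffffffff), whereas immed_double_const
truncates to the 32-bit mode and sign-extends, yielding the canonical
(const_int -1).  */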
7425 constant_part
7426 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7427 (HOST_WIDE_INT) 0,
7428 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7429 op1 = plus_constant (op1, INTVAL (constant_part));
7430 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7431 op1 = force_operand (op1, target);
7432 return op1;
7435 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7436 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7437 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7439 rtx constant_part;
7441 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7442 EXPAND_SUM);
7443 if (! CONSTANT_P (op0))
7445 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7446 VOIDmode, modifier);
7447 /* Don't go to both_summands if modifier
7448 says it's not right to return a PLUS. */
7449 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7450 goto binop2;
7451 goto both_summands;
7453 /* Use immed_double_const to ensure that the constant is
7454 truncated according to the mode of OP0, then sign extended
7455 to a HOST_WIDE_INT. Using the constant directly can result
7456 in non-canonical RTL in a 64x32 cross compile. */
7457 constant_part
7458 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7459 (HOST_WIDE_INT) 0,
7460 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7461 op0 = plus_constant (op0, INTVAL (constant_part));
7462 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7463 op0 = force_operand (op0, target);
7464 return op0;
7468 /* No sense saving up arithmetic to be done
7469 if it's all in the wrong mode to form part of an address.
7470 And force_operand won't know whether to sign-extend or
7471 zero-extend. */
7472 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7473 || mode != ptr_mode)
7474 goto binop;
7476 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7477 subtarget = 0;
7479 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7480 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7482 both_summands:
7483 /* Make sure any term that's a sum with a constant comes last. */
7484 if (GET_CODE (op0) == PLUS
7485 && CONSTANT_P (XEXP (op0, 1)))
7487 temp = op0;
7488 op0 = op1;
7489 op1 = temp;
7491 /* If adding to a sum including a constant,
7492 associate it to put the constant outside. */
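/* Illustrative sketch (hypothetical pseudos): with OP0 = (reg 100) and
OP1 = (plus (reg 101) (const_int 8)), the code below rebuilds OP0 as
(plus (reg 101) (reg 100)) and OP1 as (const_int 8), so the constant
ends up outermost in the final sum.  */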
7493 if (GET_CODE (op1) == PLUS
7494 && CONSTANT_P (XEXP (op1, 1)))
7496 rtx constant_term = const0_rtx;
7498 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7499 if (temp != 0)
7500 op0 = temp;
7501 /* Ensure that MULT comes first if there is one. */
7502 else if (GET_CODE (op0) == MULT)
7503 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7504 else
7505 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7507 /* Let's also eliminate constants from op0 if possible. */
7508 op0 = eliminate_constant_term (op0, &constant_term);
7510 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7511 their sum should be a constant. Form it into OP1, since the
7512 result we want will then be OP0 + OP1. */
7514 temp = simplify_binary_operation (PLUS, mode, constant_term,
7515 XEXP (op1, 1));
7516 if (temp != 0)
7517 op1 = temp;
7518 else
7519 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7522 /* Put a constant term last and put a multiplication first. */
7523 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7524 temp = op1, op1 = op0, op0 = temp;
7526 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7527 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7529 case MINUS_EXPR:
7530 /* For initializers, we are allowed to return a MINUS of two
7531 symbolic constants. Here we handle all cases when both operands
7532 are constant. */
7533 /* Handle difference of two symbolic constants,
7534 for the sake of an initializer. */
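/* Illustrative sketch (hypothetical identifiers): an initializer such as
&B - &A, where both addresses are symbolic constants, can be returned as
(minus (symbol_ref "B") (symbol_ref "A")) and left for later passes and
the assembler to resolve.  */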
7535 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7536 && really_constant_p (TREE_OPERAND (exp, 0))
7537 && really_constant_p (TREE_OPERAND (exp, 1)))
7539 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7540 modifier);
7541 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7542 modifier);
7544 /* If the last operand is a CONST_INT, use plus_constant of
7545 the negated constant. Else make the MINUS. */
7546 if (GET_CODE (op1) == CONST_INT)
7547 return plus_constant (op0, - INTVAL (op1));
7548 else
7549 return gen_rtx_MINUS (mode, op0, op1);
7551 /* Convert A - const to A + (-const). */
7552 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7554 tree negated = fold (build1 (NEGATE_EXPR, type,
7555 TREE_OPERAND (exp, 1)));
7557 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7558 /* If we can't negate the constant in TYPE, leave it alone and
7559 expand_binop will negate it for us. We used to try to do it
7560 here in the signed version of TYPE, but that doesn't work
7561 on POINTER_TYPEs. */;
7562 else
7564 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7565 goto plus_expr;
7568 this_optab = ! unsignedp && flag_trapv
7569 && (GET_MODE_CLASS(mode) == MODE_INT)
7570 ? subv_optab : sub_optab;
7571 goto binop;
7573 case MULT_EXPR:
7574 /* If first operand is constant, swap them.
7575 Thus the following special case checks need only
7576 check the second operand. */
7577 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7579 tree t1 = TREE_OPERAND (exp, 0);
7580 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7581 TREE_OPERAND (exp, 1) = t1;
7584 /* Attempt to return something suitable for generating an
7585 indexed address, for machines that support that. */
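/* Illustrative sketch (hypothetical values): under EXPAND_SUM, for
(i + 3) * 4 the first operand expands to (plus (reg i) (const_int 3)) and
the distributive law below yields (plus (mult (reg i) (const_int 4))
(const_int 12)), a shape that fits an indexed addressing mode.  */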
7587 if (modifier == EXPAND_SUM && mode == ptr_mode
7588 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7589 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7591 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7592 EXPAND_SUM);
7594 /* Apply distributive law if OP0 is x+c. */
7595 if (GET_CODE (op0) == PLUS
7596 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7597 return
7598 gen_rtx_PLUS
7599 (mode,
7600 gen_rtx_MULT
7601 (mode, XEXP (op0, 0),
7602 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7603 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7604 * INTVAL (XEXP (op0, 1))));
7606 if (GET_CODE (op0) != REG)
7607 op0 = force_operand (op0, NULL_RTX);
7608 if (GET_CODE (op0) != REG)
7609 op0 = copy_to_mode_reg (mode, op0);
7611 return
7612 gen_rtx_MULT (mode, op0,
7613 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7616 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7617 subtarget = 0;
7619 /* Check for multiplying things that have been extended
7620 from a narrower type. If this machine supports multiplying
7621 in that narrower type with a result in the desired type,
7622 do it that way, and avoid the explicit type-conversion. */
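/* Illustrative sketch (hypothetical source): for (int) s1 * (int) s2 with
short operands, a target that provides a signed HImode-to-SImode widening
multiply lets us feed the narrow operands to that pattern directly and
skip the explicit extensions.  */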
7623 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7624 && TREE_CODE (type) == INTEGER_TYPE
7625 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7626 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7627 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7628 && int_fits_type_p (TREE_OPERAND (exp, 1),
7629 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7630 /* Don't use a widening multiply if a shift will do. */
7631 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7632 > HOST_BITS_PER_WIDE_INT)
7633 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7635 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7636 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7638 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7639 /* If both operands are extended, they must either both
7640 be zero-extended or both be sign-extended. */
7641 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7643 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7645 enum machine_mode innermode
7646 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7647 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7648 ? smul_widen_optab : umul_widen_optab);
7649 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7650 ? umul_widen_optab : smul_widen_optab);
7651 if (mode == GET_MODE_WIDER_MODE (innermode))
7653 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7655 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7656 NULL_RTX, VOIDmode, 0);
7657 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7658 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7659 VOIDmode, 0);
7660 else
7661 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7662 NULL_RTX, VOIDmode, 0);
7663 goto binop2;
7665 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7666 && innermode == word_mode)
7668 rtx htem;
7669 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7670 NULL_RTX, VOIDmode, 0);
7671 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7672 op1 = convert_modes (innermode, mode,
7673 expand_expr (TREE_OPERAND (exp, 1),
7674 NULL_RTX, VOIDmode, 0),
7675 unsignedp);
7676 else
7677 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7678 NULL_RTX, VOIDmode, 0);
7679 temp = expand_binop (mode, other_optab, op0, op1, target,
7680 unsignedp, OPTAB_LIB_WIDEN);
7681 htem = expand_mult_highpart_adjust (innermode,
7682 gen_highpart (innermode, temp),
7683 op0, op1,
7684 gen_highpart (innermode, temp),
7685 unsignedp);
7686 emit_move_insn (gen_highpart (innermode, temp), htem);
7687 return temp;
7691 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7692 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7693 return expand_mult (mode, op0, op1, target, unsignedp);
7695 case TRUNC_DIV_EXPR:
7696 case FLOOR_DIV_EXPR:
7697 case CEIL_DIV_EXPR:
7698 case ROUND_DIV_EXPR:
7699 case EXACT_DIV_EXPR:
7700 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7701 subtarget = 0;
7702 /* Possible optimization: compute the dividend with EXPAND_SUM
7703 then, if the divisor is constant, we can optimize the case
7704 where some terms of the dividend have coefficients divisible by it. */
7705 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7706 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7707 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7709 case RDIV_EXPR:
7710 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7711 saving an expensive divide. If not, combine will rebuild the original
7712 computation. */
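/* Illustrative sketch: with -funsafe-math-optimizations, X / Y is emitted
as X * (1.0 / Y); if the same 1.0 / Y is loop-invariant or reused, CSE
can share it, and otherwise combine is expected to fold the product back
into a plain division.  */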
7713 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7714 && !real_onep (TREE_OPERAND (exp, 0)))
7715 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7716 build (RDIV_EXPR, type,
7717 build_real (type, dconst1),
7718 TREE_OPERAND (exp, 1))),
7719 target, tmode, unsignedp);
7720 this_optab = sdiv_optab;
7721 goto binop;
7723 case TRUNC_MOD_EXPR:
7724 case FLOOR_MOD_EXPR:
7725 case CEIL_MOD_EXPR:
7726 case ROUND_MOD_EXPR:
7727 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7728 subtarget = 0;
7729 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7730 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7731 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7733 case FIX_ROUND_EXPR:
7734 case FIX_FLOOR_EXPR:
7735 case FIX_CEIL_EXPR:
7736 abort (); /* Not used for C. */
7738 case FIX_TRUNC_EXPR:
7739 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7740 if (target == 0)
7741 target = gen_reg_rtx (mode);
7742 expand_fix (target, op0, unsignedp);
7743 return target;
7745 case FLOAT_EXPR:
7746 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7747 if (target == 0)
7748 target = gen_reg_rtx (mode);
7749 /* expand_float can't figure out what to do if FROM has VOIDmode.
7750 So give it the correct mode. With -O, cse will optimize this. */
7751 if (GET_MODE (op0) == VOIDmode)
7752 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7753 op0);
7754 expand_float (target, op0,
7755 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7756 return target;
7758 case NEGATE_EXPR:
7759 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7760 temp = expand_unop (mode,
7761 ! unsignedp && flag_trapv
7762 && (GET_MODE_CLASS(mode) == MODE_INT)
7763 ? negv_optab : neg_optab, op0, target, 0);
7764 if (temp == 0)
7765 abort ();
7766 return temp;
7768 case ABS_EXPR:
7769 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7771 /* Handle complex values specially. */
7772 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7773 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7774 return expand_complex_abs (mode, op0, target, unsignedp);
7776 /* Unsigned abs is simply the operand. Testing here means we don't
7777 risk generating incorrect code below. */
7778 if (TREE_UNSIGNED (type))
7779 return op0;
7781 return expand_abs (mode, op0, target, unsignedp,
7782 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7784 case MAX_EXPR:
7785 case MIN_EXPR:
7786 target = original_target;
7787 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7788 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7789 || GET_MODE (target) != mode
7790 || (GET_CODE (target) == REG
7791 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7792 target = gen_reg_rtx (mode);
7793 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7794 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7796 /* First try to do it with a special MIN or MAX instruction.
7797 If that does not win, use a conditional jump to select the proper
7798 value. */
7799 this_optab = (TREE_UNSIGNED (type)
7800 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7801 : (code == MIN_EXPR ? smin_optab : smax_optab));
7803 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7804 OPTAB_WIDEN);
7805 if (temp != 0)
7806 return temp;
7808 /* At this point, a MEM target is no longer useful; we will get better
7809 code without it. */
7811 if (GET_CODE (target) == MEM)
7812 target = gen_reg_rtx (mode);
7814 if (target != op0)
7815 emit_move_insn (target, op0);
7817 op0 = gen_label_rtx ();
7819 /* If this mode is an integer too wide to compare properly,
7820 compare word by word. Rely on cse to optimize constant cases. */
7821 if (GET_MODE_CLASS (mode) == MODE_INT
7822 && ! can_compare_p (GE, mode, ccp_jump))
7824 if (code == MAX_EXPR)
7825 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7826 target, op1, NULL_RTX, op0);
7827 else
7828 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7829 op1, target, NULL_RTX, op0);
7831 else
7833 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7834 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7835 unsignedp, mode, NULL_RTX, NULL_RTX,
7836 op0);
7838 emit_move_insn (target, op1);
7839 emit_label (op0);
7840 return target;
7842 case BIT_NOT_EXPR:
7843 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7844 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7845 if (temp == 0)
7846 abort ();
7847 return temp;
7849 case FFS_EXPR:
7850 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7851 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7852 if (temp == 0)
7853 abort ();
7854 return temp;
7856 /* ??? Can optimize bitwise operations with one arg constant.
7857 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7858 and (a bitwise1 b) bitwise2 b (etc)
7859 but that is probably not worthwhile. */
7861 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7862 boolean values when we want in all cases to compute both of them. In
7863 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7864 as actual zero-or-1 values and then bitwise anding. In cases where
7865 there cannot be any side effects, better code would be made by
7866 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7867 how to recognize those cases. */
7869 case TRUTH_AND_EXPR:
7870 case BIT_AND_EXPR:
7871 this_optab = and_optab;
7872 goto binop;
7874 case TRUTH_OR_EXPR:
7875 case BIT_IOR_EXPR:
7876 this_optab = ior_optab;
7877 goto binop;
7879 case TRUTH_XOR_EXPR:
7880 case BIT_XOR_EXPR:
7881 this_optab = xor_optab;
7882 goto binop;
7884 case LSHIFT_EXPR:
7885 case RSHIFT_EXPR:
7886 case LROTATE_EXPR:
7887 case RROTATE_EXPR:
7888 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7889 subtarget = 0;
7890 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7891 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7892 unsignedp);
7894 /* Could determine the answer when only additive constants differ. Also,
7895 the addition of one can be handled by changing the condition. */
7896 case LT_EXPR:
7897 case LE_EXPR:
7898 case GT_EXPR:
7899 case GE_EXPR:
7900 case EQ_EXPR:
7901 case NE_EXPR:
7902 case UNORDERED_EXPR:
7903 case ORDERED_EXPR:
7904 case UNLT_EXPR:
7905 case UNLE_EXPR:
7906 case UNGT_EXPR:
7907 case UNGE_EXPR:
7908 case UNEQ_EXPR:
7909 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7910 if (temp != 0)
7911 return temp;
7913 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7914 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7915 && original_target
7916 && GET_CODE (original_target) == REG
7917 && (GET_MODE (original_target)
7918 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7920 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7921 VOIDmode, 0);
7923 if (temp != original_target)
7924 temp = copy_to_reg (temp);
7926 op1 = gen_label_rtx ();
7927 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7928 GET_MODE (temp), unsignedp, op1);
7929 emit_move_insn (temp, const1_rtx);
7930 emit_label (op1);
7931 return temp;
7934 /* If no set-flag instruction, must generate a conditional
7935 store into a temporary variable. Drop through
7936 and handle this like && and ||. */
7938 case TRUTH_ANDIF_EXPR:
7939 case TRUTH_ORIF_EXPR:
7940 if (! ignore
7941 && (target == 0 || ! safe_from_p (target, exp, 1)
7942 /* Make sure we don't have a hard reg (such as function's return
7943 value) live across basic blocks, if not optimizing. */
7944 || (!optimize && GET_CODE (target) == REG
7945 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7946 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7948 if (target)
7949 emit_clr_insn (target);
7951 op1 = gen_label_rtx ();
7952 jumpifnot (exp, op1);
7954 if (target)
7955 emit_0_to_1_insn (target);
7957 emit_label (op1);
7958 return ignore ? const0_rtx : target;
7960 case TRUTH_NOT_EXPR:
7961 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7962 /* The parser is careful to generate TRUTH_NOT_EXPR
7963 only with operands that are always zero or one. */
7964 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7965 target, 1, OPTAB_LIB_WIDEN);
7966 if (temp == 0)
7967 abort ();
7968 return temp;
7970 case COMPOUND_EXPR:
7971 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7972 emit_queue ();
7973 return expand_expr (TREE_OPERAND (exp, 1),
7974 (ignore ? const0_rtx : target),
7975 VOIDmode, 0);
7977 case COND_EXPR:
7978 /* If we would have a "singleton" (see below) were it not for a
7979 conversion in each arm, bring that conversion back out. */
7980 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7981 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7982 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7983 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7985 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7986 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7988 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7989 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7990 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7991 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7992 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7993 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7994 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7995 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7996 return expand_expr (build1 (NOP_EXPR, type,
7997 build (COND_EXPR, TREE_TYPE (iftrue),
7998 TREE_OPERAND (exp, 0),
7999 iftrue, iffalse)),
8000 target, tmode, modifier);
8004 /* Note that COND_EXPRs whose type is a structure or union
8005 are required to be constructed to contain assignments of
8006 a temporary variable, so that we can evaluate them here
8007 for side effect only. If type is void, we must do likewise. */
8009 /* If an arm of the branch requires a cleanup,
8010 only that cleanup is performed. */
8012 tree singleton = 0;
8013 tree binary_op = 0, unary_op = 0;
8015 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8016 convert it to our mode, if necessary. */
8017 if (integer_onep (TREE_OPERAND (exp, 1))
8018 && integer_zerop (TREE_OPERAND (exp, 2))
8019 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8021 if (ignore)
8023 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8024 modifier);
8025 return const0_rtx;
8028 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8029 if (GET_MODE (op0) == mode)
8030 return op0;
8032 if (target == 0)
8033 target = gen_reg_rtx (mode);
8034 convert_move (target, op0, unsignedp);
8035 return target;
8038 /* Check for X ? A + B : A. If we have this, we can copy A to the
8039 output and conditionally add B. Similarly for unary operations.
8040 Don't do this if X has side-effects because those side effects
8041 might affect A or B and the "?" operation is a sequence point in
8042 ANSI. (operand_equal_p tests for side effects.) */
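/* Illustrative sketch (hypothetical variables): for X ? A + B : A with a
side-effect-free X, the code below copies A into the result and branches
around the addition of B when X is false, instead of fully evaluating
both arms.  */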
8044 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8045 && operand_equal_p (TREE_OPERAND (exp, 2),
8046 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8047 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8048 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8049 && operand_equal_p (TREE_OPERAND (exp, 1),
8050 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8051 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8052 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8053 && operand_equal_p (TREE_OPERAND (exp, 2),
8054 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8055 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8056 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8057 && operand_equal_p (TREE_OPERAND (exp, 1),
8058 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8059 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8061 /* If we are not to produce a result, we have no target. Otherwise,
8062 if a target was specified use it; it will not be used as an
8063 intermediate target unless it is safe. If no target, use a
8064 temporary. */
8066 if (ignore)
8067 temp = 0;
8068 else if (original_target
8069 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8070 || (singleton && GET_CODE (original_target) == REG
8071 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8072 && original_target == var_rtx (singleton)))
8073 && GET_MODE (original_target) == mode
8074 #ifdef HAVE_conditional_move
8075 && (! can_conditionally_move_p (mode)
8076 || GET_CODE (original_target) == REG
8077 || TREE_ADDRESSABLE (type))
8078 #endif
8079 && (GET_CODE (original_target) != MEM
8080 || TREE_ADDRESSABLE (type)))
8081 temp = original_target;
8082 else if (TREE_ADDRESSABLE (type))
8083 abort ();
8084 else
8085 temp = assign_temp (type, 0, 0, 1);
8087 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8088 do the test of X as a store-flag operation, do this as
8089 A + ((X != 0) << log C). Similarly for other simple binary
8090 operators. Only do for C == 1 if BRANCH_COST is low. */
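/* Illustrative sketch (hypothetical variables): X ? A + 8 : A can be done
as A + ((X != 0) << 3) when X is computable with a store-flag insn; for
X ? A : A + 8 the condition is inverted first, giving
A + ((X == 0) << 3).  */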
8091 if (temp && singleton && binary_op
8092 && (TREE_CODE (binary_op) == PLUS_EXPR
8093 || TREE_CODE (binary_op) == MINUS_EXPR
8094 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8095 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8096 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8097 : integer_onep (TREE_OPERAND (binary_op, 1)))
8098 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8100 rtx result;
8101 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8102 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8103 ? addv_optab : add_optab)
8104 : TREE_CODE (binary_op) == MINUS_EXPR
8105 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8106 ? subv_optab : sub_optab)
8107 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8108 : xor_optab);
8110 /* If we had X ? A : A + 1, do this as A + (X == 0).
8112 We have to invert the truth value here and then put it
8113 back later if do_store_flag fails. We cannot simply copy
8114 TREE_OPERAND (exp, 0) to another variable and modify that
8115 because invert_truthvalue can modify the tree pointed to
8116 by its argument. */
8117 if (singleton == TREE_OPERAND (exp, 1))
8118 TREE_OPERAND (exp, 0)
8119 = invert_truthvalue (TREE_OPERAND (exp, 0));
8121 result = do_store_flag (TREE_OPERAND (exp, 0),
8122 (safe_from_p (temp, singleton, 1)
8123 ? temp : NULL_RTX),
8124 mode, BRANCH_COST <= 1);
8126 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8127 result = expand_shift (LSHIFT_EXPR, mode, result,
8128 build_int_2 (tree_log2
8129 (TREE_OPERAND
8130 (binary_op, 1)),
8132 (safe_from_p (temp, singleton, 1)
8133 ? temp : NULL_RTX), 0);
8135 if (result)
8137 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8138 return expand_binop (mode, boptab, op1, result, temp,
8139 unsignedp, OPTAB_LIB_WIDEN);
8141 else if (singleton == TREE_OPERAND (exp, 1))
8142 TREE_OPERAND (exp, 0)
8143 = invert_truthvalue (TREE_OPERAND (exp, 0));
8146 do_pending_stack_adjust ();
8147 NO_DEFER_POP;
8148 op0 = gen_label_rtx ();
8150 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8152 if (temp != 0)
8154 /* If the target conflicts with the other operand of the
8155 binary op, we can't use it. Also, we can't use the target
8156 if it is a hard register, because evaluating the condition
8157 might clobber it. */
8158 if ((binary_op
8159 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8160 || (GET_CODE (temp) == REG
8161 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8162 temp = gen_reg_rtx (mode);
8163 store_expr (singleton, temp, 0);
8165 else
8166 expand_expr (singleton,
8167 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8168 if (singleton == TREE_OPERAND (exp, 1))
8169 jumpif (TREE_OPERAND (exp, 0), op0);
8170 else
8171 jumpifnot (TREE_OPERAND (exp, 0), op0);
8173 start_cleanup_deferral ();
8174 if (binary_op && temp == 0)
8175 /* Just touch the other operand. */
8176 expand_expr (TREE_OPERAND (binary_op, 1),
8177 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8178 else if (binary_op)
8179 store_expr (build (TREE_CODE (binary_op), type,
8180 make_tree (type, temp),
8181 TREE_OPERAND (binary_op, 1)),
8182 temp, 0);
8183 else
8184 store_expr (build1 (TREE_CODE (unary_op), type,
8185 make_tree (type, temp)),
8186 temp, 0);
8187 op1 = op0;
8189 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8190 comparison operator. If we have one of these cases, set the
8191 output to A, branch on A (cse will merge these two references),
8192 then set the output to FOO. */
8193 else if (temp
8194 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8195 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8196 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8197 TREE_OPERAND (exp, 1), 0)
8198 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8199 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8200 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8202 if (GET_CODE (temp) == REG
8203 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8204 temp = gen_reg_rtx (mode);
8205 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8206 jumpif (TREE_OPERAND (exp, 0), op0);
8208 start_cleanup_deferral ();
8209 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8210 op1 = op0;
8212 else if (temp
8213 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8214 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8215 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8216 TREE_OPERAND (exp, 2), 0)
8217 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8218 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8219 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8221 if (GET_CODE (temp) == REG
8222 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8223 temp = gen_reg_rtx (mode);
8224 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8225 jumpifnot (TREE_OPERAND (exp, 0), op0);
8227 start_cleanup_deferral ();
8228 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8229 op1 = op0;
8231 else
8233 op1 = gen_label_rtx ();
8234 jumpifnot (TREE_OPERAND (exp, 0), op0);
8236 start_cleanup_deferral ();
8238 /* One branch of the cond can be void, if it never returns. For
8239 example A ? throw : E. */
8240 if (temp != 0
8241 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8242 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8243 else
8244 expand_expr (TREE_OPERAND (exp, 1),
8245 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8246 end_cleanup_deferral ();
8247 emit_queue ();
8248 emit_jump_insn (gen_jump (op1));
8249 emit_barrier ();
8250 emit_label (op0);
8251 start_cleanup_deferral ();
8252 if (temp != 0
8253 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8254 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8255 else
8256 expand_expr (TREE_OPERAND (exp, 2),
8257 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8260 end_cleanup_deferral ();
8262 emit_queue ();
8263 emit_label (op1);
8264 OK_DEFER_POP;
8266 return temp;
8269 case TARGET_EXPR:
8271 /* Something needs to be initialized, but we didn't know
8272 where that thing was when building the tree. For example,
8273 it could be the return value of a function, or a parameter
8274 to a function which is laid out on the stack, or a temporary
8275 variable which must be passed by reference.
8277 We guarantee that the expression will either be constructed
8278 or copied into our original target. */
8280 tree slot = TREE_OPERAND (exp, 0);
8281 tree cleanups = NULL_TREE;
8282 tree exp1;
8284 if (TREE_CODE (slot) != VAR_DECL)
8285 abort ();
8287 if (! ignore)
8288 target = original_target;
8290 /* Set this here so that if we get a target that refers to a
8291 register variable that's already been used, put_reg_into_stack
8292 knows that it should fix up those uses. */
8293 TREE_USED (slot) = 1;
8295 if (target == 0)
8297 if (DECL_RTL_SET_P (slot))
8299 target = DECL_RTL (slot);
8300 /* If we have already expanded the slot, don't do
8301 it again. (mrs) */
8302 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8303 return target;
8305 else
8307 target = assign_temp (type, 2, 0, 1);
8308 /* All temp slots at this level must not conflict. */
8309 preserve_temp_slots (target);
8310 SET_DECL_RTL (slot, target);
8311 if (TREE_ADDRESSABLE (slot))
8312 put_var_into_stack (slot);
8314 /* Since SLOT is not known to the called function
8315 to belong to its stack frame, we must build an explicit
8316 cleanup. This case occurs when we must build up a reference
8317 to pass the reference as an argument. In this case,
8318 it is very likely that such a reference need not be
8319 built here. */
8321 if (TREE_OPERAND (exp, 2) == 0)
8322 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8323 cleanups = TREE_OPERAND (exp, 2);
8326 else
8328 /* This case does occur, when expanding a parameter which
8329 needs to be constructed on the stack. The target
8330 is the actual stack address that we want to initialize.
8331 The function we call will perform the cleanup in this case. */
8333 /* If we have already assigned it space, use that space,
8334 not the target that we were passed in, as our target
8335 parameter is only a hint. */
8336 if (DECL_RTL_SET_P (slot))
8338 target = DECL_RTL (slot);
8339 /* If we have already expanded the slot, don't do
8340 it again. (mrs) */
8341 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8342 return target;
8344 else
8346 SET_DECL_RTL (slot, target);
8347 /* If we must have an addressable slot, then make sure that
8348 the RTL that we just stored in slot is OK. */
8349 if (TREE_ADDRESSABLE (slot))
8350 put_var_into_stack (slot);
8354 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8355 /* Mark it as expanded. */
8356 TREE_OPERAND (exp, 1) = NULL_TREE;
8358 store_expr (exp1, target, 0);
8360 expand_decl_cleanup (NULL_TREE, cleanups);
8362 return target;
8365 case INIT_EXPR:
8367 tree lhs = TREE_OPERAND (exp, 0);
8368 tree rhs = TREE_OPERAND (exp, 1);
8370 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8371 return temp;
8374 case MODIFY_EXPR:
8376 /* If lhs is complex, expand calls in rhs before computing it.
8377 That's so we don't compute a pointer and save it over a
8378 call. If lhs is simple, compute it first so we can give it
8379 as a target if the rhs is just a call. This avoids an
8380 extra temp and copy and that prevents a partial-subsumption
8381 which makes bad code. Actually we could treat
8382 component_ref's of vars like vars. */
8384 tree lhs = TREE_OPERAND (exp, 0);
8385 tree rhs = TREE_OPERAND (exp, 1);
8387 temp = 0;
8389 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8390 of size 1. In this case, (unless we need the result of the
8391 assignment) we can do this more efficiently with a
8392 test followed by an assignment, if necessary.
8394 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8395 things change so we do, this code should be enhanced to
8396 support it. */
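/* Illustrative sketch (hypothetical fields): when the result is unused,
S.A |= T.B with two 1-bit fields expands as "if (T.B) S.A = 1;" and
S.A &= T.B expands as "if (!T.B) S.A = 0;", using the jump-and-assign
sequence below.  */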
8397 if (ignore
8398 && TREE_CODE (lhs) == COMPONENT_REF
8399 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8400 || TREE_CODE (rhs) == BIT_AND_EXPR)
8401 && TREE_OPERAND (rhs, 0) == lhs
8402 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8403 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8404 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8406 rtx label = gen_label_rtx ();
8408 do_jump (TREE_OPERAND (rhs, 1),
8409 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8410 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8411 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8412 (TREE_CODE (rhs) == BIT_IOR_EXPR
8413 ? integer_one_node
8414 : integer_zero_node)),
8415 0, 0);
8416 do_pending_stack_adjust ();
8417 emit_label (label);
8418 return const0_rtx;
8421 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8423 return temp;
8426 case RETURN_EXPR:
8427 if (!TREE_OPERAND (exp, 0))
8428 expand_null_return ();
8429 else
8430 expand_return (TREE_OPERAND (exp, 0));
8431 return const0_rtx;
8433 case PREINCREMENT_EXPR:
8434 case PREDECREMENT_EXPR:
8435 return expand_increment (exp, 0, ignore);
8437 case POSTINCREMENT_EXPR:
8438 case POSTDECREMENT_EXPR:
8439 /* Faster to treat as pre-increment if result is not used. */
8440 return expand_increment (exp, ! ignore, ignore);
8442 case ADDR_EXPR:
8443 /* Are we taking the address of a nested function? */
8444 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8445 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8446 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8447 && ! TREE_STATIC (exp))
8449 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8450 op0 = force_operand (op0, target);
8452 /* If we are taking the address of something erroneous, just
8453 return a zero. */
8454 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8455 return const0_rtx;
8456 /* If we are taking the address of a constant and are at the
8457 top level, we have to use output_constant_def since we can't
8458 call force_const_mem at top level. */
8459 else if (cfun == 0
8460 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8461 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8462 == 'c')))
8463 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8464 else
8466 /* We make sure to pass const0_rtx down if we came in with
8467 ignore set, to avoid doing the cleanups twice for something. */
8468 op0 = expand_expr (TREE_OPERAND (exp, 0),
8469 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8470 (modifier == EXPAND_INITIALIZER
8471 ? modifier : EXPAND_CONST_ADDRESS));
8473 /* If we are going to ignore the result, OP0 will have been set
8474 to const0_rtx, so just return it. Don't get confused and
8475 think we are taking the address of the constant. */
8476 if (ignore)
8477 return op0;
8479 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8480 clever and return a REG when given a MEM. */
8481 op0 = protect_from_queue (op0, 1);
8483 /* We would like the object in memory. If it is a constant, we can
8484 have it be statically allocated into memory. For a non-constant,
8485 we need to allocate some memory and store the value into it. */
8487 if (CONSTANT_P (op0))
8488 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8489 op0);
8490 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8491 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8492 || GET_CODE (op0) == PARALLEL)
8494 /* If the operand is a SAVE_EXPR, we can deal with this by
8495 forcing the SAVE_EXPR into memory. */
8496 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8498 put_var_into_stack (TREE_OPERAND (exp, 0));
8499 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8501 else
8503 /* If this object is in a register, it can't be BLKmode. */
8504 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8505 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8507 if (GET_CODE (op0) == PARALLEL)
8508 /* Handle calls that pass values in multiple
8509 non-contiguous locations. The Irix 6 ABI has examples
8510 of this. */
8511 emit_group_store (memloc, op0,
8512 int_size_in_bytes (inner_type));
8513 else
8514 emit_move_insn (memloc, op0);
8516 op0 = memloc;
8520 if (GET_CODE (op0) != MEM)
8521 abort ();
8523 mark_temp_addr_taken (op0);
8524 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8526 op0 = XEXP (op0, 0);
8527 #ifdef POINTERS_EXTEND_UNSIGNED
8528 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8529 && mode == ptr_mode)
8530 op0 = convert_memory_address (ptr_mode, op0);
8531 #endif
8532 return op0;
8535 /* If OP0 is not aligned at least as much as the type requires, we
8536 need to make a temporary, copy OP0 to it, and take the address of
8537 the temporary. We want to use the alignment of the type, not of
8538 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8539 the test for BLKmode means that can't happen. The test for
8540 BLKmode is because we never make mis-aligned MEMs with
8541 non-BLKmode.
8543 We don't need to do this at all if the machine doesn't have
8544 strict alignment. */
8545 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8546 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8547 > MEM_ALIGN (op0))
8548 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8550 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8551 rtx new
8552 = assign_stack_temp_for_type
8553 (TYPE_MODE (inner_type),
8554 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8555 : int_size_in_bytes (inner_type),
8556 1, build_qualified_type (inner_type,
8557 (TYPE_QUALS (inner_type)
8558 | TYPE_QUAL_CONST)));
8560 if (TYPE_ALIGN_OK (inner_type))
8561 abort ();
8563 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8564 op0 = new;
8567 op0 = force_operand (XEXP (op0, 0), target);
8570 if (flag_force_addr
8571 && GET_CODE (op0) != REG
8572 && modifier != EXPAND_CONST_ADDRESS
8573 && modifier != EXPAND_INITIALIZER
8574 && modifier != EXPAND_SUM)
8575 op0 = force_reg (Pmode, op0);
8577 if (GET_CODE (op0) == REG
8578 && ! REG_USERVAR_P (op0))
8579 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8581 #ifdef POINTERS_EXTEND_UNSIGNED
8582 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8583 && mode == ptr_mode)
8584 op0 = convert_memory_address (ptr_mode, op0);
8585 #endif
8587 return op0;
8589 case ENTRY_VALUE_EXPR:
8590 abort ();
8592 /* COMPLEX type for Extended Pascal & Fortran */
8593 case COMPLEX_EXPR:
8595 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8596 rtx insns;
8598 /* Get the rtx code of the operands. */
8599 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8600 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8602 if (! target)
8603 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8605 start_sequence ();
8607 /* Move the real (op0) and imaginary (op1) parts to their location. */
8608 emit_move_insn (gen_realpart (mode, target), op0);
8609 emit_move_insn (gen_imagpart (mode, target), op1);
8611 insns = get_insns ();
8612 end_sequence ();
8614 /* Complex construction should appear as a single unit. */
8615 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8616 each with a separate pseudo as destination.
8617 It's not correct for flow to treat them as a unit. */
8618 if (GET_CODE (target) != CONCAT)
8619 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8620 else
8621 emit_insns (insns);
8623 return target;
8626 case REALPART_EXPR:
8627 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8628 return gen_realpart (mode, op0);
8630 case IMAGPART_EXPR:
8631 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8632 return gen_imagpart (mode, op0);
8634 case CONJ_EXPR:
8636 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8637 rtx imag_t;
8638 rtx insns;
8640 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8642 if (! target)
8643 target = gen_reg_rtx (mode);
8645 start_sequence ();
8647 /* Store the realpart and the negated imagpart to target. */
8648 emit_move_insn (gen_realpart (partmode, target),
8649 gen_realpart (partmode, op0));
8651 imag_t = gen_imagpart (partmode, target);
8652 temp = expand_unop (partmode,
8653 ! unsignedp && flag_trapv
8654 && (GET_MODE_CLASS(partmode) == MODE_INT)
8655 ? negv_optab : neg_optab,
8656 gen_imagpart (partmode, op0), imag_t, 0);
8657 if (temp != imag_t)
8658 emit_move_insn (imag_t, temp);
8660 insns = get_insns ();
8661 end_sequence ();
8663 /* Conjugate should appear as a single unit.
8664 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8665 each with a separate pseudo as destination.
8666 It's not correct for flow to treat them as a unit. */
8667 if (GET_CODE (target) != CONCAT)
8668 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8669 else
8670 emit_insns (insns);
8672 return target;
8675 case TRY_CATCH_EXPR:
8677 tree handler = TREE_OPERAND (exp, 1);
8679 expand_eh_region_start ();
8681 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8683 expand_eh_region_end_cleanup (handler);
8685 return op0;
8688 case TRY_FINALLY_EXPR:
8690 tree try_block = TREE_OPERAND (exp, 0);
8691 tree finally_block = TREE_OPERAND (exp, 1);
8692 rtx finally_label = gen_label_rtx ();
8693 rtx done_label = gen_label_rtx ();
8694 rtx return_link = gen_reg_rtx (Pmode);
8695 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8696 (tree) finally_label, (tree) return_link);
8697 TREE_SIDE_EFFECTS (cleanup) = 1;
8699 /* Start a new binding layer that will keep track of all cleanup
8700 actions to be performed. */
8701 expand_start_bindings (2);
8703 target_temp_slot_level = temp_slot_level;
8705 expand_decl_cleanup (NULL_TREE, cleanup);
8706 op0 = expand_expr (try_block, target, tmode, modifier);
8708 preserve_temp_slots (op0);
8709 expand_end_bindings (NULL_TREE, 0, 0);
8710 emit_jump (done_label);
8711 emit_label (finally_label);
8712 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8713 emit_indirect_jump (return_link);
8714 emit_label (done_label);
8715 return op0;
8718 case GOTO_SUBROUTINE_EXPR:
8720 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8721 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8722 rtx return_address = gen_label_rtx ();
8723 emit_move_insn (return_link,
8724 gen_rtx_LABEL_REF (Pmode, return_address));
8725 emit_jump (subr);
8726 emit_label (return_address);
8727 return const0_rtx;
8730 case VA_ARG_EXPR:
8731 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8733 case EXC_PTR_EXPR:
8734 return get_exception_pointer (cfun);
8736 case FDESC_EXPR:
8737 /* Function descriptors are not valid except for as
8738 initialization constants, and should not be expanded. */
8739 abort ();
8741 default:
8742 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8745 /* Here to do an ordinary binary operator, generating an instruction
8746 from the optab already placed in `this_optab'. */
8747 binop:
8748 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8749 subtarget = 0;
8750 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8751 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8752 binop2:
8753 temp = expand_binop (mode, this_optab, op0, op1, target,
8754 unsignedp, OPTAB_LIB_WIDEN);
8755 if (temp == 0)
8756 abort ();
8757 return temp;
8760 /* Return the tree node if ARG corresponds to a string constant or zero
8761 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8762 in bytes within the string that ARG is accessing. The type of the
8763 offset will be `sizetype'. */
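/* Illustrative sketch (hypothetical argument): for ARG of the form
"hello" + 3, i.e. the address of a string constant plus 3, this returns
the STRING_CST "hello" and sets *PTR_OFFSET to a sizetype 3; for anything
it does not recognize it returns zero.  */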
8765 tree
8766 string_constant (arg, ptr_offset)
8767 tree arg;
8768 tree *ptr_offset;
8770 STRIP_NOPS (arg);
8772 if (TREE_CODE (arg) == ADDR_EXPR
8773 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8775 *ptr_offset = size_zero_node;
8776 return TREE_OPERAND (arg, 0);
8778 else if (TREE_CODE (arg) == PLUS_EXPR)
8780 tree arg0 = TREE_OPERAND (arg, 0);
8781 tree arg1 = TREE_OPERAND (arg, 1);
8783 STRIP_NOPS (arg0);
8784 STRIP_NOPS (arg1);
8786 if (TREE_CODE (arg0) == ADDR_EXPR
8787 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8789 *ptr_offset = convert (sizetype, arg1);
8790 return TREE_OPERAND (arg0, 0);
8792 else if (TREE_CODE (arg1) == ADDR_EXPR
8793 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8795 *ptr_offset = convert (sizetype, arg0);
8796 return TREE_OPERAND (arg1, 0);
8800 return 0;
8803 /* Expand code for a post- or pre- increment or decrement
8804 and return the RTX for the result.
8805 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8807 static rtx
8808 expand_increment (exp, post, ignore)
8809 tree exp;
8810 int post, ignore;
8812 rtx op0, op1;
8813 rtx temp, value;
8814 tree incremented = TREE_OPERAND (exp, 0);
8815 optab this_optab = add_optab;
8816 int icode;
8817 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8818 int op0_is_copy = 0;
8819 int single_insn = 0;
8820 /* 1 means we can't store into OP0 directly,
8821 because it is a subreg narrower than a word,
8822 and we don't dare clobber the rest of the word. */
8823 int bad_subreg = 0;
8825 /* Stabilize any component ref that might need to be
8826 evaluated more than once below. */
8827 if (!post
8828 || TREE_CODE (incremented) == BIT_FIELD_REF
8829 || (TREE_CODE (incremented) == COMPONENT_REF
8830 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8831 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8832 incremented = stabilize_reference (incremented);
8833 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8834 ones into save exprs so that they don't accidentally get evaluated
8835 more than once by the code below. */
8836 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8837 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8838 incremented = save_expr (incremented);
8840 /* Compute the operands as RTX.
8841 Note whether OP0 is the actual lvalue or a copy of it:
8842 I believe it is a copy iff it is a register or subreg
8843 and insns were generated in computing it. */
8845 temp = get_last_insn ();
8846 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8848 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8849 in place but instead must do sign- or zero-extension during assignment,
8850 so we copy it into a new register and let the code below use it as
8851 a copy.
8853 Note that we can safely modify this SUBREG since it is known not to be
8854 shared (it was made by the expand_expr call above). */
8856 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8858 if (post)
8859 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8860 else
8861 bad_subreg = 1;
8863 else if (GET_CODE (op0) == SUBREG
8864 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8866 /* We cannot increment this SUBREG in place. If we are
8867 post-incrementing, get a copy of the old value. Otherwise,
8868 just mark that we cannot increment in place. */
8869 if (post)
8870 op0 = copy_to_reg (op0);
8871 else
8872 bad_subreg = 1;
8875 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8876 && temp != get_last_insn ());
8877 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8879 /* Decide whether incrementing or decrementing. */
8880 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8881 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8882 this_optab = sub_optab;
8884 /* Convert decrement by a constant into a negative increment. */
8885 if (this_optab == sub_optab
8886 && GET_CODE (op1) == CONST_INT)
8888 op1 = GEN_INT (-INTVAL (op1));
8889 this_optab = add_optab;
8892 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
8893 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
8895 /* For a preincrement, see if we can do this with a single instruction. */
8896 if (!post)
8898 icode = (int) this_optab->handlers[(int) mode].insn_code;
8899 if (icode != (int) CODE_FOR_nothing
8900 /* Make sure that OP0 is valid for operands 0 and 1
8901 of the insn we want to queue. */
8902 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8903 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8904 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8905 single_insn = 1;
8908 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8909 then we cannot just increment OP0. We must therefore contrive to
8910 increment the original value. Then, for postincrement, we can return
8911 OP0 since it is a copy of the old value. For preincrement, expand here
8912 unless we can do it with a single insn.
8914 Likewise if storing directly into OP0 would clobber high bits
8915 we need to preserve (bad_subreg). */
8916 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8918 /* This is the easiest way to increment the value wherever it is.
8919 Problems with multiple evaluation of INCREMENTED are prevented
8920 because either (1) it is a component_ref or preincrement,
8921 in which case it was stabilized above, or (2) it is an array_ref
8922 with constant index in an array in a register, which is
8923 safe to reevaluate. */
8924 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8925 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8926 ? MINUS_EXPR : PLUS_EXPR),
8927 TREE_TYPE (exp),
8928 incremented,
8929 TREE_OPERAND (exp, 1));
8931 while (TREE_CODE (incremented) == NOP_EXPR
8932 || TREE_CODE (incremented) == CONVERT_EXPR)
8934 newexp = convert (TREE_TYPE (incremented), newexp);
8935 incremented = TREE_OPERAND (incremented, 0);
8938 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8939 return post ? op0 : temp;
8942 if (post)
8944 /* We have a true reference to the value in OP0.
8945 If there is an insn to add or subtract in this mode, queue it.
8946 Queueing the increment insn avoids the register shuffling
8947 that often results if we must increment now and first save
8948 the old value for subsequent use. */
8950 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8951 op0 = stabilize (op0);
8952 #endif
8954 icode = (int) this_optab->handlers[(int) mode].insn_code;
8955 if (icode != (int) CODE_FOR_nothing
8956 /* Make sure that OP0 is valid for operands 0 and 1
8957 of the insn we want to queue. */
8958 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8959 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8961 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8962 op1 = force_reg (mode, op1);
8964 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8966 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8968 rtx addr = (general_operand (XEXP (op0, 0), mode)
8969 ? force_reg (Pmode, XEXP (op0, 0))
8970 : copy_to_reg (XEXP (op0, 0)));
8971 rtx temp, result;
8973 op0 = replace_equiv_address (op0, addr);
8974 temp = force_reg (GET_MODE (op0), op0);
8975 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8976 op1 = force_reg (mode, op1);
8978 /* The increment queue is LIFO, thus we have to `queue'
8979 the instructions in reverse order. */
8980 enqueue_insn (op0, gen_move_insn (op0, temp));
8981 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8982 return result;
8986 /* Preincrement, or we can't increment with one simple insn. */
8987 if (post)
8988 /* Save a copy of the value before inc or dec, to return it later. */
8989 temp = value = copy_to_reg (op0);
8990 else
8991 /* Arrange to return the incremented value. */
8992 /* Copy the rtx because expand_binop will protect from the queue,
8993 and the results of that would be invalid for us to return
8994 if our caller does emit_queue before using our result. */
8995 temp = copy_rtx (value = op0);
8997 /* Increment however we can. */
8998 op1 = expand_binop (mode, this_optab, value, op1, op0,
8999 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9001 /* Make sure the value is stored into OP0. */
9002 if (op1 != op0)
9003 emit_move_insn (op0, op1);
9005 return temp;
9008 /* At the start of a function, record that we have no previously-pushed
9009 arguments waiting to be popped. */
9011 void
9012 init_pending_stack_adjust ()
9014 pending_stack_adjust = 0;
9017 /* When exiting from a function, if safe, clear out any pending stack adjust
9018 so the adjustment won't get done.
9020 Note, if the current function calls alloca, then it must have a
9021 frame pointer regardless of the value of flag_omit_frame_pointer. */
9023 void
9024 clear_pending_stack_adjust ()
9026 #ifdef EXIT_IGNORE_STACK
9027 if (optimize > 0
9028 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9029 && EXIT_IGNORE_STACK
9030 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9031 && ! flag_inline_functions)
9033 stack_pointer_delta -= pending_stack_adjust,
9034 pending_stack_adjust = 0;
9036 #endif
9039 /* Pop any previously-pushed arguments that have not been popped yet. */
9041 void
9042 do_pending_stack_adjust ()
9044 if (inhibit_defer_pop == 0)
9046 if (pending_stack_adjust != 0)
9047 adjust_stack (GEN_INT (pending_stack_adjust));
9048 pending_stack_adjust = 0;
9052 /* Expand conditional expressions. */
9054 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9055 LABEL is an rtx of code CODE_LABEL, in this function and all the
9056 functions here. */
9058 void
9059 jumpifnot (exp, label)
9060 tree exp;
9061 rtx label;
9063 do_jump (exp, label, NULL_RTX);
9066 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9068 void
9069 jumpif (exp, label)
9070 tree exp;
9071 rtx label;
9073 do_jump (exp, NULL_RTX, label);
9076 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9077 the result is zero, or IF_TRUE_LABEL if the result is one.
9078 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9079 meaning fall through in that case.
9081 do_jump always does any pending stack adjust except when it does not
9082 actually perform a jump. An example where there is no jump
9083 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9085 This function is responsible for optimizing cases such as
9086 &&, || and comparison operators in EXP. */
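/* For example, for `if (a && b)' this emits a jump on A to IF_FALSE_LABEL
   (or to a locally generated drop-through label) and only then a jump on B,
   so B is never evaluated when A is zero; a comparison such as `a < b'
   turns directly into a compare followed by a conditional branch when the
   mode supports it, and otherwise uses the word-by-word helpers below.  */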
9088 void
9089 do_jump (exp, if_false_label, if_true_label)
9090 tree exp;
9091 rtx if_false_label, if_true_label;
9093 enum tree_code code = TREE_CODE (exp);
9094 /* Some cases need to create a label to jump to
9095 in order to properly fall through.
9096 These cases set DROP_THROUGH_LABEL nonzero. */
9097 rtx drop_through_label = 0;
9098 rtx temp;
9099 int i;
9100 tree type;
9101 enum machine_mode mode;
9103 #ifdef MAX_INTEGER_COMPUTATION_MODE
9104 check_max_integer_computation_mode (exp);
9105 #endif
9107 emit_queue ();
9109 switch (code)
9111 case ERROR_MARK:
9112 break;
9114 case INTEGER_CST:
9115 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9116 if (temp)
9117 emit_jump (temp);
9118 break;
9120 #if 0
9121 /* This is not true with #pragma weak */
9122 case ADDR_EXPR:
9123 /* The address of something can never be zero. */
9124 if (if_true_label)
9125 emit_jump (if_true_label);
9126 break;
9127 #endif
9129 case NOP_EXPR:
9130 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9131 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9132 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9133 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9134 goto normal;
9135 case CONVERT_EXPR:
9136 /* If we are narrowing the operand, we have to do the compare in the
9137 narrower mode. */
9138 if ((TYPE_PRECISION (TREE_TYPE (exp))
9139 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9140 goto normal;
9141 case NON_LVALUE_EXPR:
9142 case REFERENCE_EXPR:
9143 case ABS_EXPR:
9144 case NEGATE_EXPR:
9145 case LROTATE_EXPR:
9146 case RROTATE_EXPR:
9147 /* These cannot change zero->non-zero or vice versa. */
9148 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9149 break;
9151 case WITH_RECORD_EXPR:
9152 /* Put the object on the placeholder list, recurse through our first
9153 operand, and pop the list. */
9154 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9155 placeholder_list);
9156 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9157 placeholder_list = TREE_CHAIN (placeholder_list);
9158 break;
9160 #if 0
9161 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9162 a test, and can be longer if the test is eliminated. */
9163 case PLUS_EXPR:
9164 /* Reduce to minus. */
9165 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9166 TREE_OPERAND (exp, 0),
9167 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9168 TREE_OPERAND (exp, 1))));
9169 /* Process as MINUS. */
9170 #endif
9172 case MINUS_EXPR:
9173 /* Non-zero iff operands of minus differ. */
9174 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9175 TREE_OPERAND (exp, 0),
9176 TREE_OPERAND (exp, 1)),
9177 NE, NE, if_false_label, if_true_label);
9178 break;
9180 case BIT_AND_EXPR:
9181 /* If we are AND'ing with a small constant, do this comparison in the
9182 smallest type that fits. If the machine doesn't have comparisons
9183 that small, it will be converted back to the wider comparison.
9184 This helps if we are testing the sign bit of a narrower object.
9185 combine can't do this for us because it can't know whether a
9186 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
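/* For example, `if (x & 0x80)' with X an int has tree_floor_log2 == 7, so
   mode_for_size (8, MODE_INT, 0) normally yields QImode and the whole test
   is redone as an unsigned QImode comparison, provided the target has a
   QImode compare (the cmp_optab check below) and byte access is not slow.  */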
9188 if (! SLOW_BYTE_ACCESS
9189 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9190 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9191 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9192 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9193 && (type = type_for_mode (mode, 1)) != 0
9194 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9195 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9196 != CODE_FOR_nothing))
9198 do_jump (convert (type, exp), if_false_label, if_true_label);
9199 break;
9201 goto normal;
9203 case TRUTH_NOT_EXPR:
9204 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9205 break;
9207 case TRUTH_ANDIF_EXPR:
9208 if (if_false_label == 0)
9209 if_false_label = drop_through_label = gen_label_rtx ();
9210 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9211 start_cleanup_deferral ();
9212 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9213 end_cleanup_deferral ();
9214 break;
9216 case TRUTH_ORIF_EXPR:
9217 if (if_true_label == 0)
9218 if_true_label = drop_through_label = gen_label_rtx ();
9219 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9220 start_cleanup_deferral ();
9221 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9222 end_cleanup_deferral ();
9223 break;
9225 case COMPOUND_EXPR:
9226 push_temp_slots ();
9227 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9228 preserve_temp_slots (NULL_RTX);
9229 free_temp_slots ();
9230 pop_temp_slots ();
9231 emit_queue ();
9232 do_pending_stack_adjust ();
9233 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9234 break;
9236 case COMPONENT_REF:
9237 case BIT_FIELD_REF:
9238 case ARRAY_REF:
9239 case ARRAY_RANGE_REF:
9241 HOST_WIDE_INT bitsize, bitpos;
9242 int unsignedp;
9243 enum machine_mode mode;
9244 tree type;
9245 tree offset;
9246 int volatilep = 0;
9248 /* Get description of this reference. We don't actually care
9249 about the underlying object here. */
9250 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9251 &unsignedp, &volatilep);
9253 type = type_for_size (bitsize, unsignedp);
9254 if (! SLOW_BYTE_ACCESS
9255 && type != 0 && bitsize >= 0
9256 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9257 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9258 != CODE_FOR_nothing))
9260 do_jump (convert (type, exp), if_false_label, if_true_label);
9261 break;
9263 goto normal;
9266 case COND_EXPR:
9267 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9268 if (integer_onep (TREE_OPERAND (exp, 1))
9269 && integer_zerop (TREE_OPERAND (exp, 2)))
9270 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9272 else if (integer_zerop (TREE_OPERAND (exp, 1))
9273 && integer_onep (TREE_OPERAND (exp, 2)))
9274 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9276 else
9278 rtx label1 = gen_label_rtx ();
9279 drop_through_label = gen_label_rtx ();
9281 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9283 start_cleanup_deferral ();
9284 /* Now the THEN-expression. */
9285 do_jump (TREE_OPERAND (exp, 1),
9286 if_false_label ? if_false_label : drop_through_label,
9287 if_true_label ? if_true_label : drop_through_label);
9288 /* In case the do_jump just above never jumps. */
9289 do_pending_stack_adjust ();
9290 emit_label (label1);
9292 /* Now the ELSE-expression. */
9293 do_jump (TREE_OPERAND (exp, 2),
9294 if_false_label ? if_false_label : drop_through_label,
9295 if_true_label ? if_true_label : drop_through_label);
9296 end_cleanup_deferral ();
9298 break;
9300 case EQ_EXPR:
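/* For complex operands, the equality is decomposed below into an EQ test
   of the real parts ANDIF an EQ test of the imaginary parts, each operand
   wrapped in a SAVE_EXPR so it is expanded only once.  */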
9302 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9304 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9305 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9307 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9308 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9309 do_jump
9310 (fold
9311 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9312 fold (build (EQ_EXPR, TREE_TYPE (exp),
9313 fold (build1 (REALPART_EXPR,
9314 TREE_TYPE (inner_type),
9315 exp0)),
9316 fold (build1 (REALPART_EXPR,
9317 TREE_TYPE (inner_type),
9318 exp1)))),
9319 fold (build (EQ_EXPR, TREE_TYPE (exp),
9320 fold (build1 (IMAGPART_EXPR,
9321 TREE_TYPE (inner_type),
9322 exp0)),
9323 fold (build1 (IMAGPART_EXPR,
9324 TREE_TYPE (inner_type),
9325 exp1)))))),
9326 if_false_label, if_true_label);
9329 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9330 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9332 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9333 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9334 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9335 else
9336 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9337 break;
9340 case NE_EXPR:
9342 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9344 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9345 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9347 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9348 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9349 do_jump
9350 (fold
9351 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9352 fold (build (NE_EXPR, TREE_TYPE (exp),
9353 fold (build1 (REALPART_EXPR,
9354 TREE_TYPE (inner_type),
9355 exp0)),
9356 fold (build1 (REALPART_EXPR,
9357 TREE_TYPE (inner_type),
9358 exp1)))),
9359 fold (build (NE_EXPR, TREE_TYPE (exp),
9360 fold (build1 (IMAGPART_EXPR,
9361 TREE_TYPE (inner_type),
9362 exp0)),
9363 fold (build1 (IMAGPART_EXPR,
9364 TREE_TYPE (inner_type),
9365 exp1)))))),
9366 if_false_label, if_true_label);
9369 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9370 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9372 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9373 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9374 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9375 else
9376 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9377 break;
9380 case LT_EXPR:
9381 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9382 if (GET_MODE_CLASS (mode) == MODE_INT
9383 && ! can_compare_p (LT, mode, ccp_jump))
9384 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9385 else
9386 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9387 break;
9389 case LE_EXPR:
9390 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9391 if (GET_MODE_CLASS (mode) == MODE_INT
9392 && ! can_compare_p (LE, mode, ccp_jump))
9393 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9394 else
9395 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9396 break;
9398 case GT_EXPR:
9399 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9400 if (GET_MODE_CLASS (mode) == MODE_INT
9401 && ! can_compare_p (GT, mode, ccp_jump))
9402 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9403 else
9404 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9405 break;
9407 case GE_EXPR:
9408 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9409 if (GET_MODE_CLASS (mode) == MODE_INT
9410 && ! can_compare_p (GE, mode, ccp_jump))
9411 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9412 else
9413 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9414 break;
9416 case UNORDERED_EXPR:
9417 case ORDERED_EXPR:
9419 enum rtx_code cmp, rcmp;
9420 int do_rev;
9422 if (code == UNORDERED_EXPR)
9423 cmp = UNORDERED, rcmp = ORDERED;
9424 else
9425 cmp = ORDERED, rcmp = UNORDERED;
9426 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9428 do_rev = 0;
9429 if (! can_compare_p (cmp, mode, ccp_jump)
9430 && (can_compare_p (rcmp, mode, ccp_jump)
9431 /* If the target doesn't provide either UNORDERED or ORDERED
9432 comparisons, canonicalize on UNORDERED for the library. */
9433 || rcmp == UNORDERED))
9434 do_rev = 1;
9436 if (! do_rev)
9437 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9438 else
9439 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9441 break;
9444 enum rtx_code rcode1;
9445 enum tree_code tcode2;
9447 case UNLT_EXPR:
9448 rcode1 = UNLT;
9449 tcode2 = LT_EXPR;
9450 goto unordered_bcc;
9451 case UNLE_EXPR:
9452 rcode1 = UNLE;
9453 tcode2 = LE_EXPR;
9454 goto unordered_bcc;
9455 case UNGT_EXPR:
9456 rcode1 = UNGT;
9457 tcode2 = GT_EXPR;
9458 goto unordered_bcc;
9459 case UNGE_EXPR:
9460 rcode1 = UNGE;
9461 tcode2 = GE_EXPR;
9462 goto unordered_bcc;
9463 case UNEQ_EXPR:
9464 rcode1 = UNEQ;
9465 tcode2 = EQ_EXPR;
9466 goto unordered_bcc;
9468 unordered_bcc:
9469 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9470 if (can_compare_p (rcode1, mode, ccp_jump))
9471 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9472 if_true_label);
9473 else
9475 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9476 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9477 tree cmp0, cmp1;
9479 /* If the target doesn't support combined unordered
9480 compares, decompose into UNORDERED + comparison. */
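/* For example, UNLT (a, b) becomes UNORDERED (a, b) || a < b.  */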
9481 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9482 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9483 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9484 do_jump (exp, if_false_label, if_true_label);
9487 break;
9489 /* Special case:
9490 __builtin_expect (<test>, 0) and
9491 __builtin_expect (<test>, 1)
9493 We need to do this here, so that <test> is not converted to a SCC
9494 operation on machines that use condition code registers and COMPARE
9495 like the PowerPC, and then the jump is done based on whether the SCC
9496 operation produced a 1 or 0. */
9497 case CALL_EXPR:
9498 /* Check for a built-in function. */
9499 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9501 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9502 tree arglist = TREE_OPERAND (exp, 1);
9504 if (TREE_CODE (fndecl) == FUNCTION_DECL
9505 && DECL_BUILT_IN (fndecl)
9506 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9507 && arglist != NULL_TREE
9508 && TREE_CHAIN (arglist) != NULL_TREE)
9510 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9511 if_true_label);
9513 if (seq != NULL_RTX)
9515 emit_insn (seq);
9516 return;
9520 /* fall through and generate the normal code. */
9522 default:
9523 normal:
9524 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9525 #if 0
9526 /* This is not needed any more and causes poor code since it causes
9527 comparisons and tests from non-SI objects to have different code
9528 sequences. */
9529 /* Copy to register to avoid generating bad insns by cse
9530 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9531 if (!cse_not_expected && GET_CODE (temp) == MEM)
9532 temp = copy_to_reg (temp);
9533 #endif
9534 do_pending_stack_adjust ();
9535 /* Do any postincrements in the expression that was tested. */
9536 emit_queue ();
9538 if (GET_CODE (temp) == CONST_INT
9539 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9540 || GET_CODE (temp) == LABEL_REF)
9542 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9543 if (target)
9544 emit_jump (target);
9546 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9547 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9548 /* Note swapping the labels gives us not-equal. */
9549 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9550 else if (GET_MODE (temp) != VOIDmode)
9551 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9552 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9553 GET_MODE (temp), NULL_RTX,
9554 if_false_label, if_true_label);
9555 else
9556 abort ();
9559 if (drop_through_label)
9561 /* If do_jump produces code that might be jumped around,
9562 do any stack adjusts from that code, before the place
9563 where control merges in. */
9564 do_pending_stack_adjust ();
9565 emit_label (drop_through_label);
9569 /* Given a comparison expression EXP for values too wide to be compared
9570 with one insn, test the comparison and jump to the appropriate label.
9571 The code of EXP is ignored; we always test GT if SWAP is 0,
9572 and LT if SWAP is 1. */
9574 static void
9575 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9576 tree exp;
9577 int swap;
9578 rtx if_false_label, if_true_label;
9580 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9581 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9582 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9583 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9585 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9588 /* Compare OP0 with OP1, word at a time, in mode MODE.
9589 UNSIGNEDP says to do unsigned comparison.
9590 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
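/* For example, a DImode comparison on a 32-bit target (nwords == 2) first
   branches to IF_TRUE_LABEL if the high-order words compare greater, then
   to IF_FALSE_LABEL if they still differ (which at that point means less),
   and only falls through to the low-order words when the high words are
   equal.  */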
9592 void
9593 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9594 enum machine_mode mode;
9595 int unsignedp;
9596 rtx op0, op1;
9597 rtx if_false_label, if_true_label;
9599 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9600 rtx drop_through_label = 0;
9601 int i;
9603 if (! if_true_label || ! if_false_label)
9604 drop_through_label = gen_label_rtx ();
9605 if (! if_true_label)
9606 if_true_label = drop_through_label;
9607 if (! if_false_label)
9608 if_false_label = drop_through_label;
9610 /* Compare a word at a time, high order first. */
9611 for (i = 0; i < nwords; i++)
9613 rtx op0_word, op1_word;
9615 if (WORDS_BIG_ENDIAN)
9617 op0_word = operand_subword_force (op0, i, mode);
9618 op1_word = operand_subword_force (op1, i, mode);
9620 else
9622 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9623 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9626 /* All but high-order word must be compared as unsigned. */
9627 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9628 (unsignedp || i > 0), word_mode, NULL_RTX,
9629 NULL_RTX, if_true_label);
9631 /* Consider lower words only if these are equal. */
9632 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9633 NULL_RTX, NULL_RTX, if_false_label);
9636 if (if_false_label)
9637 emit_jump (if_false_label);
9638 if (drop_through_label)
9639 emit_label (drop_through_label);
9642 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9643 with one insn, test the comparison and jump to the appropriate label. */
9645 static void
9646 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9647 tree exp;
9648 rtx if_false_label, if_true_label;
9650 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9651 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9652 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9653 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9654 int i;
9655 rtx drop_through_label = 0;
9657 if (! if_false_label)
9658 drop_through_label = if_false_label = gen_label_rtx ();
9660 for (i = 0; i < nwords; i++)
9661 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9662 operand_subword_force (op1, i, mode),
9663 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9664 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9666 if (if_true_label)
9667 emit_jump (if_true_label);
9668 if (drop_through_label)
9669 emit_label (drop_through_label);
9672 /* Jump according to whether OP0 is 0.
9673 We assume that OP0 has an integer mode that is too wide
9674 for the available compare insns. */
9676 void
9677 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9678 rtx op0;
9679 rtx if_false_label, if_true_label;
9681 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9682 rtx part;
9683 int i;
9684 rtx drop_through_label = 0;
9686 /* The fastest way of doing this comparison on almost any machine is to
9687 "or" all the words and compare the result. If all have to be loaded
9688 from memory and this is a very wide item, it's possible this may
9689 be slower, but that's highly unlikely. */
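/* For example, a DImode value on a 32-bit target is tested by IOR-ing its
   two words into one word_mode register and comparing that single result
   against zero, instead of branching on each word separately.  */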
9691 part = gen_reg_rtx (word_mode);
9692 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9693 for (i = 1; i < nwords && part != 0; i++)
9694 part = expand_binop (word_mode, ior_optab, part,
9695 operand_subword_force (op0, i, GET_MODE (op0)),
9696 part, 1, OPTAB_WIDEN);
9698 if (part != 0)
9700 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9701 NULL_RTX, if_false_label, if_true_label);
9703 return;
9706 /* If we couldn't do the "or" simply, do this with a series of compares. */
9707 if (! if_false_label)
9708 drop_through_label = if_false_label = gen_label_rtx ();
9710 for (i = 0; i < nwords; i++)
9711 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9712 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9713 if_false_label, NULL_RTX);
9715 if (if_true_label)
9716 emit_jump (if_true_label);
9718 if (drop_through_label)
9719 emit_label (drop_through_label);
9722 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9723 (including code to compute the values to be compared)
9724 and set (CC0) according to the result.
9725 The decision as to signed or unsigned comparison must be made by the caller.
9727 We force a stack adjustment unless there are currently
9728 things pushed on the stack that aren't yet used.
9730 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9731 compared. */
9733 rtx
9734 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9735 rtx op0, op1;
9736 enum rtx_code code;
9737 int unsignedp;
9738 enum machine_mode mode;
9739 rtx size;
9741 rtx tem;
9743 /* If one operand is constant, make it the second one. Only do this
9744 if the other operand is not constant as well. */
9746 if (swap_commutative_operands_p (op0, op1))
9748 tem = op0;
9749 op0 = op1;
9750 op1 = tem;
9751 code = swap_condition (code);
9754 if (flag_force_mem)
9756 op0 = force_not_mem (op0);
9757 op1 = force_not_mem (op1);
9760 do_pending_stack_adjust ();
9762 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9763 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9764 return tem;
9766 #if 0
9767 /* There's no need to do this now that combine.c can eliminate lots of
9768 sign extensions. This can be less efficient in certain cases on other
9769 machines. */
9771 /* If this is a signed equality comparison, we can do it as an
9772 unsigned comparison since zero-extension is cheaper than sign
9773 extension and comparisons with zero are done as unsigned. This is
9774 the case even on machines that can do fast sign extension, since
9775 zero-extension is easier to combine with other operations than
9776 sign-extension is. If we are comparing against a constant, we must
9777 convert it to what it would look like unsigned. */
9778 if ((code == EQ || code == NE) && ! unsignedp
9779 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9781 if (GET_CODE (op1) == CONST_INT
9782 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9783 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9784 unsignedp = 1;
9786 #endif
9788 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9790 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9793 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9794 The decision as to signed or unsigned comparison must be made by the caller.
9796 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9797 compared. */
9799 void
9800 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9801 if_false_label, if_true_label)
9802 rtx op0, op1;
9803 enum rtx_code code;
9804 int unsignedp;
9805 enum machine_mode mode;
9806 rtx size;
9807 rtx if_false_label, if_true_label;
9809 rtx tem;
9810 int dummy_true_label = 0;
9812 /* Reverse the comparison if that is safe and we want to jump if it is
9813 false. */
9814 if (! if_true_label && ! FLOAT_MODE_P (mode))
9816 if_true_label = if_false_label;
9817 if_false_label = 0;
9818 code = reverse_condition (code);
9821 /* If one operand is constant, make it the second one. Only do this
9822 if the other operand is not constant as well. */
9824 if (swap_commutative_operands_p (op0, op1))
9826 tem = op0;
9827 op0 = op1;
9828 op1 = tem;
9829 code = swap_condition (code);
9832 if (flag_force_mem)
9834 op0 = force_not_mem (op0);
9835 op1 = force_not_mem (op1);
9838 do_pending_stack_adjust ();
9840 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9841 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9843 if (tem == const_true_rtx)
9845 if (if_true_label)
9846 emit_jump (if_true_label);
9848 else
9850 if (if_false_label)
9851 emit_jump (if_false_label);
9853 return;
9856 #if 0
9857 /* There's no need to do this now that combine.c can eliminate lots of
9858 sign extensions. This can be less efficient in certain cases on other
9859 machines. */
9861 /* If this is a signed equality comparison, we can do it as an
9862 unsigned comparison since zero-extension is cheaper than sign
9863 extension and comparisons with zero are done as unsigned. This is
9864 the case even on machines that can do fast sign extension, since
9865 zero-extension is easier to combine with other operations than
9866 sign-extension is. If we are comparing against a constant, we must
9867 convert it to what it would look like unsigned. */
9868 if ((code == EQ || code == NE) && ! unsignedp
9869 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9871 if (GET_CODE (op1) == CONST_INT
9872 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9873 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9874 unsignedp = 1;
9876 #endif
9878 if (! if_true_label)
9880 dummy_true_label = 1;
9881 if_true_label = gen_label_rtx ();
9884 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
9885 if_true_label);
9887 if (if_false_label)
9888 emit_jump (if_false_label);
9889 if (dummy_true_label)
9890 emit_label (if_true_label);
9893 /* Generate code for a comparison expression EXP (including code to compute
9894 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9895 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9896 generated code will drop through.
9897 SIGNED_CODE should be the rtx operation for this comparison for
9898 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9900 We force a stack adjustment unless there are currently
9901 things pushed on the stack that aren't yet used. */
9903 static void
9904 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9905 if_true_label)
9906 tree exp;
9907 enum rtx_code signed_code, unsigned_code;
9908 rtx if_false_label, if_true_label;
9910 rtx op0, op1;
9911 tree type;
9912 enum machine_mode mode;
9913 int unsignedp;
9914 enum rtx_code code;
9916 /* Don't crash if the comparison was erroneous. */
9917 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9918 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9919 return;
9921 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9922 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9923 return;
9925 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9926 mode = TYPE_MODE (type);
9927 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9928 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9929 || (GET_MODE_BITSIZE (mode)
9930 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
9931 1)))))))
9933 /* op0 might have been replaced by a promoted constant, in which
9934 case the type of the second argument should be used. */
9935 type = TREE_TYPE (TREE_OPERAND (exp, 1));
9936 mode = TYPE_MODE (type);
9938 unsignedp = TREE_UNSIGNED (type);
9939 code = unsignedp ? unsigned_code : signed_code;
9941 #ifdef HAVE_canonicalize_funcptr_for_compare
9942 /* If function pointers need to be "canonicalized" before they can
9943 be reliably compared, then canonicalize them. */
9944 if (HAVE_canonicalize_funcptr_for_compare
9945 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9946 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9947 == FUNCTION_TYPE))
9949 rtx new_op0 = gen_reg_rtx (mode);
9951 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9952 op0 = new_op0;
9955 if (HAVE_canonicalize_funcptr_for_compare
9956 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9957 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9958 == FUNCTION_TYPE))
9960 rtx new_op1 = gen_reg_rtx (mode);
9962 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9963 op1 = new_op1;
9965 #endif
9967 /* Do any postincrements in the expression that was tested. */
9968 emit_queue ();
9970 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9971 ((mode == BLKmode)
9972 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9973 if_false_label, if_true_label);
9976 /* Generate code to calculate EXP using a store-flag instruction
9977 and return an rtx for the result. EXP is either a comparison
9978 or a TRUTH_NOT_EXPR whose operand is a comparison.
9980 If TARGET is nonzero, store the result there if convenient.
9982 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9983 cheap.
9985 Return zero if there is no suitable set-flag instruction
9986 available on this machine.
9988 Once expand_expr has been called on the arguments of the comparison,
9989 we are committed to doing the store flag, since it is not safe to
9990 re-evaluate the expression. We emit the store-flag insn by calling
9991 emit_store_flag, but only expand the arguments if we have a reason
9992 to believe that emit_store_flag will be successful. If we think that
9993 it will, but it isn't, we have to simulate the store-flag with a
9994 set/jump/set sequence. */
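/* The simulated sequence at the end of this function is: TARGET = 1;
   compare; branch over the next insn if the comparison holds; TARGET = 0;
   label.  When INVERT is set the two constants are swapped.  */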
9996 static rtx
9997 do_store_flag (exp, target, mode, only_cheap)
9998 tree exp;
9999 rtx target;
10000 enum machine_mode mode;
10001 int only_cheap;
10003 enum rtx_code code;
10004 tree arg0, arg1, type;
10005 tree tem;
10006 enum machine_mode operand_mode;
10007 int invert = 0;
10008 int unsignedp;
10009 rtx op0, op1;
10010 enum insn_code icode;
10011 rtx subtarget = target;
10012 rtx result, label;
10014 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10015 result at the end. We can't simply invert the test since it would
10016 have already been inverted if it were valid. This case occurs for
10017 some floating-point comparisons. */
10019 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10020 invert = 1, exp = TREE_OPERAND (exp, 0);
10022 arg0 = TREE_OPERAND (exp, 0);
10023 arg1 = TREE_OPERAND (exp, 1);
10025 /* Don't crash if the comparison was erroneous. */
10026 if (arg0 == error_mark_node || arg1 == error_mark_node)
10027 return const0_rtx;
10029 type = TREE_TYPE (arg0);
10030 operand_mode = TYPE_MODE (type);
10031 unsignedp = TREE_UNSIGNED (type);
10033 /* We won't bother with BLKmode store-flag operations because it would mean
10034 passing a lot of information to emit_store_flag. */
10035 if (operand_mode == BLKmode)
10036 return 0;
10038 /* We won't bother with store-flag operations involving function pointers
10039 when function pointers must be canonicalized before comparisons. */
10040 #ifdef HAVE_canonicalize_funcptr_for_compare
10041 if (HAVE_canonicalize_funcptr_for_compare
10042 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10043 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10044 == FUNCTION_TYPE))
10045 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10046 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10047 == FUNCTION_TYPE))))
10048 return 0;
10049 #endif
10051 STRIP_NOPS (arg0);
10052 STRIP_NOPS (arg1);
10054 /* Get the rtx comparison code to use. We know that EXP is a comparison
10055 operation of some type. Some comparisons against 1 and -1 can be
10056 converted to comparisons with zero. Do so here so that the tests
10057 below will be aware that we have a comparison with zero. These
10058 tests will not catch constants in the first operand, but constants
10059 are rarely passed as the first operand. */
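/* For example, a signed `x < 1' becomes `x <= 0' and a signed `x > -1'
   becomes `x >= 0', so the tests further down see a comparison against
   zero.  */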
10061 switch (TREE_CODE (exp))
10063 case EQ_EXPR:
10064 code = EQ;
10065 break;
10066 case NE_EXPR:
10067 code = NE;
10068 break;
10069 case LT_EXPR:
10070 if (integer_onep (arg1))
10071 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10072 else
10073 code = unsignedp ? LTU : LT;
10074 break;
10075 case LE_EXPR:
10076 if (! unsignedp && integer_all_onesp (arg1))
10077 arg1 = integer_zero_node, code = LT;
10078 else
10079 code = unsignedp ? LEU : LE;
10080 break;
10081 case GT_EXPR:
10082 if (! unsignedp && integer_all_onesp (arg1))
10083 arg1 = integer_zero_node, code = GE;
10084 else
10085 code = unsignedp ? GTU : GT;
10086 break;
10087 case GE_EXPR:
10088 if (integer_onep (arg1))
10089 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10090 else
10091 code = unsignedp ? GEU : GE;
10092 break;
10094 case UNORDERED_EXPR:
10095 code = UNORDERED;
10096 break;
10097 case ORDERED_EXPR:
10098 code = ORDERED;
10099 break;
10100 case UNLT_EXPR:
10101 code = UNLT;
10102 break;
10103 case UNLE_EXPR:
10104 code = UNLE;
10105 break;
10106 case UNGT_EXPR:
10107 code = UNGT;
10108 break;
10109 case UNGE_EXPR:
10110 code = UNGE;
10111 break;
10112 case UNEQ_EXPR:
10113 code = UNEQ;
10114 break;
10116 default:
10117 abort ();
10120 /* Put a constant second. */
10121 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10123 tem = arg0; arg0 = arg1; arg1 = tem;
10124 code = swap_condition (code);
10127 /* If this is an equality or inequality test of a single bit, we can
10128 do this by shifting the bit being tested to the low-order bit and
10129 masking the result with the constant 1. If the condition was EQ,
10130 we xor it with 1. This does not require an scc insn and is faster
10131 than an scc insn even if we have it. */
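/* For example, `(x & 8) != 0' is computed as `(x >> 3) & 1'; for the EQ
   sense the intermediate value is XOR-ed with 1 before the final AND, and
   the AND itself is dropped when the bit tested is the most significant
   bit of the operand's type.  */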
10133 if ((code == NE || code == EQ)
10134 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10135 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10137 tree inner = TREE_OPERAND (arg0, 0);
10138 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10139 int ops_unsignedp;
10141 /* If INNER is a right shift of a constant and it plus BITNUM does
10142 not overflow, adjust BITNUM and INNER. */
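/* For example, `((x >> 4) & 2) != 0' becomes a test of bit 5 of X.  */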
10144 if (TREE_CODE (inner) == RSHIFT_EXPR
10145 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10146 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10147 && bitnum < TYPE_PRECISION (type)
10148 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10149 bitnum - TYPE_PRECISION (type)))
10151 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10152 inner = TREE_OPERAND (inner, 0);
10155 /* If we are going to be able to omit the AND below, we must do our
10156 operations as unsigned. If we must use the AND, we have a choice.
10157 Normally unsigned is faster, but for some machines signed is. */
10158 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10159 #ifdef LOAD_EXTEND_OP
10160 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10161 #else
10162 : 1
10163 #endif
10164 );
10166 if (! get_subtarget (subtarget)
10167 || GET_MODE (subtarget) != operand_mode
10168 || ! safe_from_p (subtarget, inner, 1))
10169 subtarget = 0;
10171 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10173 if (bitnum != 0)
10174 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10175 size_int (bitnum), subtarget, ops_unsignedp);
10177 if (GET_MODE (op0) != mode)
10178 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10180 if ((code == EQ && ! invert) || (code == NE && invert))
10181 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10182 ops_unsignedp, OPTAB_LIB_WIDEN);
10184 /* Put the AND last so it can combine with more things. */
10185 if (bitnum != TYPE_PRECISION (type) - 1)
10186 op0 = expand_and (op0, const1_rtx, subtarget);
10188 return op0;
10191 /* Now see if we are likely to be able to do this. Return if not. */
10192 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10193 return 0;
10195 icode = setcc_gen_code[(int) code];
10196 if (icode == CODE_FOR_nothing
10197 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10199 /* We can only do this if it is one of the special cases that
10200 can be handled without an scc insn. */
10201 if ((code == LT && integer_zerop (arg1))
10202 || (! only_cheap && code == GE && integer_zerop (arg1)))
10203 ;
10204 else if (BRANCH_COST >= 0
10205 && ! only_cheap && (code == NE || code == EQ)
10206 && TREE_CODE (type) != REAL_TYPE
10207 && ((abs_optab->handlers[(int) operand_mode].insn_code
10208 != CODE_FOR_nothing)
10209 || (ffs_optab->handlers[(int) operand_mode].insn_code
10210 != CODE_FOR_nothing)))
10211 ;
10212 else
10213 return 0;
10216 if (! get_subtarget (target)
10217 || GET_MODE (subtarget) != operand_mode
10218 || ! safe_from_p (subtarget, arg1, 1))
10219 subtarget = 0;
10221 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10222 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10224 if (target == 0)
10225 target = gen_reg_rtx (mode);
10227 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10228 because, if the emit_store_flag does anything it will succeed and
10229 OP0 and OP1 will not be used subsequently. */
10231 result = emit_store_flag (target, code,
10232 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10233 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10234 operand_mode, unsignedp, 1);
10236 if (result)
10238 if (invert)
10239 result = expand_binop (mode, xor_optab, result, const1_rtx,
10240 result, 0, OPTAB_LIB_WIDEN);
10241 return result;
10244 /* If this failed, we have to do this with set/compare/jump/set code. */
10245 if (GET_CODE (target) != REG
10246 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10247 target = gen_reg_rtx (GET_MODE (target));
10249 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10250 result = compare_from_rtx (op0, op1, code, unsignedp,
10251 operand_mode, NULL_RTX);
10252 if (GET_CODE (result) == CONST_INT)
10253 return (((result == const0_rtx && ! invert)
10254 || (result != const0_rtx && invert))
10255 ? const0_rtx : const1_rtx);
10257 /* The code of RESULT may not match CODE if compare_from_rtx
10258 decided to swap its operands and reverse the original code.
10260 We know that compare_from_rtx returns either a CONST_INT or
10261 a new comparison code, so it is safe to just extract the
10262 code from RESULT. */
10263 code = GET_CODE (result);
10265 label = gen_label_rtx ();
10266 if (bcc_gen_fctn[(int) code] == 0)
10267 abort ();
10269 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10270 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10271 emit_label (label);
10273 return target;
10277 /* Stubs in case we haven't got a casesi insn. */
10278 #ifndef HAVE_casesi
10279 # define HAVE_casesi 0
10280 # define gen_casesi(a, b, c, d, e) (0)
10281 # define CODE_FOR_casesi CODE_FOR_nothing
10282 #endif
10284 /* If the machine does not have a case insn that compares the bounds,
10285 this means extra overhead for dispatch tables, which raises the
10286 threshold for using them. */
10287 #ifndef CASE_VALUES_THRESHOLD
10288 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10289 #endif /* CASE_VALUES_THRESHOLD */
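/* That is, with the default definition a switch needs on the order of 4
   case values (5 when there is no casesi pattern) before a dispatch table
   is used.  */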
10291 unsigned int
10292 case_values_threshold ()
10294 return CASE_VALUES_THRESHOLD;
10297 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10298 0 otherwise (i.e. if there is no casesi instruction). */
10299 int
10300 try_casesi (index_type, index_expr, minval, range,
10301 table_label, default_label)
10302 tree index_type, index_expr, minval, range;
10303 rtx table_label ATTRIBUTE_UNUSED;
10304 rtx default_label;
10306 enum machine_mode index_mode = SImode;
10307 int index_bits = GET_MODE_BITSIZE (index_mode);
10308 rtx op1, op2, index;
10309 enum machine_mode op_mode;
10311 if (! HAVE_casesi)
10312 return 0;
10314 /* Convert the index to SImode. */
10315 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10317 enum machine_mode omode = TYPE_MODE (index_type);
10318 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10320 /* We must handle the endpoints in the original mode. */
10321 index_expr = build (MINUS_EXPR, index_type,
10322 index_expr, minval);
10323 minval = integer_zero_node;
10324 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10325 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10326 omode, 1, default_label);
10327 /* Now we can safely truncate. */
10328 index = convert_to_mode (index_mode, index, 0);
10330 else
10332 if (TYPE_MODE (index_type) != index_mode)
10334 index_expr = convert (type_for_size (index_bits, 0),
10335 index_expr);
10336 index_type = TREE_TYPE (index_expr);
10339 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10341 emit_queue ();
10342 index = protect_from_queue (index, 0);
10343 do_pending_stack_adjust ();
10345 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10346 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10347 (index, op_mode))
10348 index = copy_to_mode_reg (op_mode, index);
10350 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10352 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10353 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10354 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10355 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10356 (op1, op_mode))
10357 op1 = copy_to_mode_reg (op_mode, op1);
10359 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10361 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10362 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10363 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10364 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10365 (op2, op_mode))
10366 op2 = copy_to_mode_reg (op_mode, op2);
10368 emit_jump_insn (gen_casesi (index, op1, op2,
10369 table_label, default_label));
10370 return 1;
10373 /* Attempt to generate a tablejump instruction; same concept. */
10374 #ifndef HAVE_tablejump
10375 #define HAVE_tablejump 0
10376 #define gen_tablejump(x, y) (0)
10377 #endif
10379 /* Subroutine of the next function.
10381 INDEX is the value being switched on, with the lowest value
10382 in the table already subtracted.
10383 MODE is its expected mode (needed if INDEX is constant).
10384 RANGE is the length of the jump table.
10385 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10387 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10388 index value is out of range. */
10390 static void
10391 do_tablejump (index, mode, range, table_label, default_label)
10392 rtx index, range, table_label, default_label;
10393 enum machine_mode mode;
10395 rtx temp, vector;
10397 /* Do an unsigned comparison (in the proper mode) between the index
10398 expression and the value which represents the length of the range.
10399 Since we just finished subtracting the lower bound of the range
10400 from the index expression, this comparison allows us to simultaneously
10401 check that the original index expression value is both greater than
10402 or equal to the minimum value of the range and less than or equal to
10403 the maximum value of the range. */
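/* For example, for case values 3 through 10, INDEX arrives here as x - 3,
   and the single unsigned test against the range (7) rejects both x < 3,
   which wraps around to a huge unsigned value, and x > 10.  */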
10405 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10406 default_label);
10408 /* If index is in range, it must fit in Pmode.
10409 Convert to Pmode so we can index with it. */
10410 if (mode != Pmode)
10411 index = convert_to_mode (Pmode, index, 1);
10413 /* Don't let a MEM slip through, because then INDEX that comes
10414 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10415 and break_out_memory_refs will go to work on it and mess it up. */
10416 #ifdef PIC_CASE_VECTOR_ADDRESS
10417 if (flag_pic && GET_CODE (index) != REG)
10418 index = copy_to_mode_reg (Pmode, index);
10419 #endif
10421 /* If flag_force_addr were to affect this address
10422 it could interfere with the tricky assumptions made
10423 about addresses that contain label-refs,
10424 which may be valid only very near the tablejump itself. */
10425 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10426 GET_MODE_SIZE, because this indicates how large insns are. The other
10427 uses should all be Pmode, because they are addresses. This code
10428 could fail if addresses and insns are not the same size. */
10429 index = gen_rtx_PLUS (Pmode,
10430 gen_rtx_MULT (Pmode, index,
10431 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10432 gen_rtx_LABEL_REF (Pmode, table_label));
10433 #ifdef PIC_CASE_VECTOR_ADDRESS
10434 if (flag_pic)
10435 index = PIC_CASE_VECTOR_ADDRESS (index);
10436 else
10437 #endif
10438 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10439 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10440 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10441 RTX_UNCHANGING_P (vector) = 1;
10442 convert_move (temp, vector, 0);
10444 emit_jump_insn (gen_tablejump (temp, table_label));
10446 /* If we are generating PIC code or if the table is PC-relative, the
10447 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10448 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10449 emit_barrier ();
10452 int
10453 try_tablejump (index_type, index_expr, minval, range,
10454 table_label, default_label)
10455 tree index_type, index_expr, minval, range;
10456 rtx table_label, default_label;
10458 rtx index;
10460 if (! HAVE_tablejump)
10461 return 0;
10463 index_expr = fold (build (MINUS_EXPR, index_type,
10464 convert (index_type, index_expr),
10465 convert (index_type, minval)));
10466 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10467 emit_queue ();
10468 index = protect_from_queue (index, 0);
10469 do_pending_stack_adjust ();
10471 do_tablejump (index, TYPE_MODE (index_type),
10472 convert_modes (TYPE_MODE (index_type),
10473 TYPE_MODE (TREE_TYPE (range)),
10474 expand_expr (range, NULL_RTX,
10475 VOIDmode, 0),
10476 TREE_UNSIGNED (TREE_TYPE (range))),
10477 table_label, default_label);
10478 return 1;