1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "defaults.h"
42 #include "toplev.h"
43 #include "ggc.h"
45 #define CEIL(x,y) (((x) + (y) - 1) / (y))
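/* Editor's note: an illustrative check of the CEIL macro above (not part of
   the original file).  CEIL rounds an integer division upward; splitting a
   10-byte block into 4-byte words needs CEIL (10, 4) == (10 + 4 - 1) / 4 == 3
   words, whereas plain 10 / 4 truncates to 2.

     assert (CEIL (10, 4) == 3);
     assert (CEIL (8, 4) == 2);
*/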
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
53 #ifdef PUSH_ROUNDING
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first */
57 #endif
59 #endif
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
64 #else
65 #define STACK_PUSH_CODE PRE_INC
66 #endif
67 #endif
69 /* Assume that case vectors are not pc-relative. */
70 #ifndef CASE_VECTOR_PC_RELATIVE
71 #define CASE_VECTOR_PC_RELATIVE 0
72 #endif
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
80 int cse_not_expected;
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
87 /* Don't check memory usage, since code is being emitted to check memory
88 usage. Used when current_function_check_memory_usage is true, to avoid
89 infinite recursion. */
90 static int in_check_memory_usage;
92 /* This structure is used by move_by_pieces to describe the move to
93 be performed. */
94 struct move_by_pieces
96 rtx to;
97 rtx to_addr;
98 int autinc_to;
99 int explicit_inc_to;
100 int to_struct;
101 int to_readonly;
102 rtx from;
103 rtx from_addr;
104 int autinc_from;
105 int explicit_inc_from;
106 int from_struct;
107 int from_readonly;
108 int len;
109 int offset;
110 int reverse;
113 /* This structure is used by clear_by_pieces to describe the clear to
114 be performed. */
116 struct clear_by_pieces
118 rtx to;
119 rtx to_addr;
120 int autinc_to;
121 int explicit_inc_to;
122 int to_struct;
123 int len;
124 int offset;
125 int reverse;
128 extern struct obstack permanent_obstack;
130 static rtx get_push_address PROTO ((int));
132 static rtx enqueue_insn PROTO((rtx, rtx));
133 static int move_by_pieces_ninsns PROTO((unsigned int, int));
134 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
135 struct move_by_pieces *));
136 static void clear_by_pieces PROTO((rtx, int, int));
137 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...),
138 enum machine_mode,
139 struct clear_by_pieces *));
140 static int is_zeros_p PROTO((tree));
141 static int mostly_zeros_p PROTO((tree));
142 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
143 tree, tree, int, int));
144 static void store_constructor PROTO((tree, rtx, int, int));
145 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
146 enum machine_mode, int, int,
147 int, int));
148 static enum memory_use_mode
149 get_memory_usage_from_modifier PROTO((enum expand_modifier));
150 static tree save_noncopied_parts PROTO((tree, tree));
151 static tree init_noncopied_parts PROTO((tree, tree));
152 static int safe_from_p PROTO((rtx, tree, int));
153 static int fixed_type_p PROTO((tree));
154 static rtx var_rtx PROTO((tree));
155 static rtx expand_increment PROTO((tree, int, int));
156 static void preexpand_calls PROTO((tree));
157 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
158 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
159 static void do_compare_and_jump PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
160 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
162 /* Record for each mode whether we can move a register directly to or
163 from an object of that mode in memory. If we can't, we won't try
164 to use that mode directly when accessing a field of that mode. */
166 static char direct_load[NUM_MACHINE_MODES];
167 static char direct_store[NUM_MACHINE_MODES];
169 /* If a memory-to-memory move would take MOVE_RATIO or more simple
170 move-instruction sequences, we will do a movstr or libcall instead. */
172 #ifndef MOVE_RATIO
173 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
174 #define MOVE_RATIO 2
175 #else
176 /* If we are optimizing for space (-Os), cut down the default move ratio. */
177 #define MOVE_RATIO (optimize_size ? 3 : 15)
178 #endif
179 #endif
181 /* This macro is used to determine whether move_by_pieces should be called
182 to perform a structure copy. */
183 #ifndef MOVE_BY_PIECES_P
184 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
185 (SIZE, ALIGN) < MOVE_RATIO)
186 #endif
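/* Editor's sketch (assumption, not part of the original file): how the
   MOVE_RATIO / MOVE_BY_PIECES_P decision is typically used.  A caller with a
   compile-time-constant size asks whether the open-coded piecewise copy is
   cheap enough; otherwise it falls back to a block-move pattern or a library
   call, as emit_block_move below does.

     if (GET_CODE (size) == CONST_INT
         && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align);
     else
       ... try the movstrM patterns, then a memcpy or bcopy call ...
*/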
188 /* This array records the insn_code of insns to perform block moves. */
189 enum insn_code movstr_optab[NUM_MACHINE_MODES];
191 /* This array records the insn_code of insns to perform block clears. */
192 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
194 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
196 #ifndef SLOW_UNALIGNED_ACCESS
197 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
198 #endif
200 /* This is run once per compilation to set up which modes can be used
201 directly in memory and to initialize the block move optab. */
203 void
204 init_expr_once ()
206 rtx insn, pat;
207 enum machine_mode mode;
208 int num_clobbers;
209 rtx mem, mem1;
210 char *free_point;
212 start_sequence ();
214 /* Since we are on the permanent obstack, we must be sure we save this
215 spot AFTER we call start_sequence, since it will reuse the rtl it
216 makes. */
217 free_point = (char *) oballoc (0);
219 /* Try indexing by frame ptr and try by stack ptr.
220 It is known that on the Convex the stack ptr isn't a valid index.
221 With luck, one or the other is valid on any machine. */
222 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
223 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
225 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
226 pat = PATTERN (insn);
228 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
229 mode = (enum machine_mode) ((int) mode + 1))
231 int regno;
232 rtx reg;
234 direct_load[(int) mode] = direct_store[(int) mode] = 0;
235 PUT_MODE (mem, mode);
236 PUT_MODE (mem1, mode);
238 /* See if there is some register that can be used in this mode and
239 directly loaded or stored from memory. */
241 if (mode != VOIDmode && mode != BLKmode)
242 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
243 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
244 regno++)
246 if (! HARD_REGNO_MODE_OK (regno, mode))
247 continue;
249 reg = gen_rtx_REG (mode, regno);
251 SET_SRC (pat) = mem;
252 SET_DEST (pat) = reg;
253 if (recog (pat, insn, &num_clobbers) >= 0)
254 direct_load[(int) mode] = 1;
256 SET_SRC (pat) = mem1;
257 SET_DEST (pat) = reg;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_load[(int) mode] = 1;
261 SET_SRC (pat) = reg;
262 SET_DEST (pat) = mem;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_store[(int) mode] = 1;
266 SET_SRC (pat) = reg;
267 SET_DEST (pat) = mem1;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_store[(int) mode] = 1;
273 end_sequence ();
274 obfree (free_point);
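/* Editor's note (illustrative, not part of the original file): after
   init_expr_once has run, direct_load[(int) mode] is 1 exactly when some hard
   register can be loaded from memory in MODE by a single recognizable move
   insn, and direct_store[] likewise for stores.  A hypothetical query:

     if (direct_load[(int) SImode])
       ... an SImode field can be fetched from memory directly ...
*/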
277 /* This is run at the start of compiling a function. */
279 void
280 init_expr ()
282 current_function->expr
283 = (struct expr_status *) xmalloc (sizeof (struct expr_status));
285 pending_chain = 0;
286 pending_stack_adjust = 0;
287 inhibit_defer_pop = 0;
288 saveregs_value = 0;
289 apply_args_value = 0;
290 forced_labels = 0;
293 /* Small sanity check that the queue is empty at the end of a function. */
294 void
295 finish_expr_for_function ()
297 if (pending_chain)
298 abort ();
301 /* Manage the queue of increment instructions to be output
302 for POSTINCREMENT_EXPR expressions, etc. */
304 /* Queue up to increment (or change) VAR later. BODY says how:
305 BODY should be the same thing you would pass to emit_insn
306 to increment right away. It will go to emit_insn later on.
308 The value is a QUEUED expression to be used in place of VAR
309 where you want to guarantee the pre-incrementation value of VAR. */
311 static rtx
312 enqueue_insn (var, body)
313 rtx var, body;
315 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
316 body, pending_chain);
317 return pending_chain;
320 /* Use protect_from_queue to convert a QUEUED expression
321 into something that you can put immediately into an instruction.
322 If the queued incrementation has not happened yet,
323 protect_from_queue returns the variable itself.
324 If the incrementation has happened, protect_from_queue returns a temp
325 that contains a copy of the old value of the variable.
327 Any time an rtx which might possibly be a QUEUED is to be put
328 into an instruction, it must be passed through protect_from_queue first.
329 QUEUED expressions are not meaningful in instructions.
331 Do not pass a value through protect_from_queue and then hold
332 on to it for a while before putting it in an instruction!
333 If the queue is flushed in between, incorrect code will result. */
336 protect_from_queue (x, modify)
337 register rtx x;
338 int modify;
340 register RTX_CODE code = GET_CODE (x);
342 #if 0 /* A QUEUED can hang around after the queue is forced out. */
343 /* Shortcut for most common case. */
344 if (pending_chain == 0)
345 return x;
346 #endif
348 if (code != QUEUED)
350 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
351 use of autoincrement. Make a copy of the contents of the memory
352 location rather than a copy of the address, but not if the value is
353 of mode BLKmode. Don't modify X in place since it might be
354 shared. */
355 if (code == MEM && GET_MODE (x) != BLKmode
356 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
358 register rtx y = XEXP (x, 0);
359 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
361 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
362 MEM_COPY_ATTRIBUTES (new, x);
363 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
365 if (QUEUED_INSN (y))
367 register rtx temp = gen_reg_rtx (GET_MODE (new));
368 emit_insn_before (gen_move_insn (temp, new),
369 QUEUED_INSN (y));
370 return temp;
372 return new;
374 /* Otherwise, recursively protect the subexpressions of all
375 the kinds of rtx's that can contain a QUEUED. */
376 if (code == MEM)
378 rtx tem = protect_from_queue (XEXP (x, 0), 0);
379 if (tem != XEXP (x, 0))
381 x = copy_rtx (x);
382 XEXP (x, 0) = tem;
385 else if (code == PLUS || code == MULT)
387 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
388 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
389 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
391 x = copy_rtx (x);
392 XEXP (x, 0) = new0;
393 XEXP (x, 1) = new1;
396 return x;
398 /* If the increment has not happened, use the variable itself. */
399 if (QUEUED_INSN (x) == 0)
400 return QUEUED_VAR (x);
401 /* If the increment has happened and a pre-increment copy exists,
402 use that copy. */
403 if (QUEUED_COPY (x) != 0)
404 return QUEUED_COPY (x);
405 /* The increment has happened but we haven't set up a pre-increment copy.
406 Set one up now, and use it. */
407 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
408 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
409 QUEUED_INSN (x));
410 return QUEUED_COPY (x);
413 /* Return nonzero if X contains a QUEUED expression:
414 if it contains anything that will be altered by a queued increment.
415 We handle only combinations of MEM, PLUS, MINUS and MULT operators
416 since memory addresses generally contain only those. */
419 queued_subexp_p (x)
420 rtx x;
422 register enum rtx_code code = GET_CODE (x);
423 switch (code)
425 case QUEUED:
426 return 1;
427 case MEM:
428 return queued_subexp_p (XEXP (x, 0));
429 case MULT:
430 case PLUS:
431 case MINUS:
432 return (queued_subexp_p (XEXP (x, 0))
433 || queued_subexp_p (XEXP (x, 1)));
434 default:
435 return 0;
439 /* Perform all the pending incrementations. */
441 void
442 emit_queue ()
444 register rtx p;
445 while ((p = pending_chain))
447 rtx body = QUEUED_BODY (p);
449 if (GET_CODE (body) == SEQUENCE)
451 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
452 emit_insn (QUEUED_BODY (p));
454 else
455 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
456 pending_chain = QUEUED_NEXT (p);
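/* Editor's sketch (assumption, not part of the original file): the typical
   life cycle of the increment queue.  A post-increment side effect is
   deferred with enqueue_insn, any rtx that may contain the resulting QUEUED
   must go through protect_from_queue before being used in an insn, and
   emit_queue finally emits the deferred increments.

     rtx q    = enqueue_insn (var, gen_move_insn (var, plus_constant (var, 1)));
     rtx safe = protect_from_queue (q, 0);   value of VAR before the increment
     emit_queue ();                          the deferred increment is emitted here
*/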
460 /* Copy data from FROM to TO, where the machine modes are not the same.
461 Both modes may be integer, or both may be floating.
462 UNSIGNEDP should be nonzero if FROM is an unsigned type.
463 This causes zero-extension instead of sign-extension. */
465 void
466 convert_move (to, from, unsignedp)
467 register rtx to, from;
468 int unsignedp;
470 enum machine_mode to_mode = GET_MODE (to);
471 enum machine_mode from_mode = GET_MODE (from);
472 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
473 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
474 enum insn_code code;
475 rtx libcall;
477 /* rtx code for making an equivalent value. */
478 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
480 to = protect_from_queue (to, 1);
481 from = protect_from_queue (from, 0);
483 if (to_real != from_real)
484 abort ();
486 /* If FROM is a SUBREG that indicates that we have already done at least
487 the required extension, strip it. We don't handle such SUBREGs as
488 TO here. */
490 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
491 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
492 >= GET_MODE_SIZE (to_mode))
493 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
494 from = gen_lowpart (to_mode, from), from_mode = to_mode;
496 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
497 abort ();
499 if (to_mode == from_mode
500 || (from_mode == VOIDmode && CONSTANT_P (from)))
502 emit_move_insn (to, from);
503 return;
506 if (to_real)
508 rtx value;
510 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
512 /* Try converting directly if the insn is supported. */
513 if ((code = can_extend_p (to_mode, from_mode, 0))
514 != CODE_FOR_nothing)
516 emit_unop_insn (code, to, from, UNKNOWN);
517 return;
521 #ifdef HAVE_trunchfqf2
522 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
524 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
525 return;
527 #endif
528 #ifdef HAVE_trunctqfqf2
529 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
531 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
532 return;
534 #endif
535 #ifdef HAVE_truncsfqf2
536 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
538 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
539 return;
541 #endif
542 #ifdef HAVE_truncdfqf2
543 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
545 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
546 return;
548 #endif
549 #ifdef HAVE_truncxfqf2
550 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
552 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
553 return;
555 #endif
556 #ifdef HAVE_trunctfqf2
557 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
559 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
560 return;
562 #endif
564 #ifdef HAVE_trunctqfhf2
565 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
567 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
568 return;
570 #endif
571 #ifdef HAVE_truncsfhf2
572 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
574 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
575 return;
577 #endif
578 #ifdef HAVE_truncdfhf2
579 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
581 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
582 return;
584 #endif
585 #ifdef HAVE_truncxfhf2
586 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
588 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
589 return;
591 #endif
592 #ifdef HAVE_trunctfhf2
593 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
595 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
596 return;
598 #endif
600 #ifdef HAVE_truncsftqf2
601 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
603 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
604 return;
606 #endif
607 #ifdef HAVE_truncdftqf2
608 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
610 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
611 return;
613 #endif
614 #ifdef HAVE_truncxftqf2
615 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
617 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
618 return;
620 #endif
621 #ifdef HAVE_trunctftqf2
622 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
624 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
625 return;
627 #endif
629 #ifdef HAVE_truncdfsf2
630 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
632 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
633 return;
635 #endif
636 #ifdef HAVE_truncxfsf2
637 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
639 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
640 return;
642 #endif
643 #ifdef HAVE_trunctfsf2
644 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
646 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
647 return;
649 #endif
650 #ifdef HAVE_truncxfdf2
651 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
653 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
654 return;
656 #endif
657 #ifdef HAVE_trunctfdf2
658 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
660 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
661 return;
663 #endif
665 libcall = (rtx) 0;
666 switch (from_mode)
668 case SFmode:
669 switch (to_mode)
671 case DFmode:
672 libcall = extendsfdf2_libfunc;
673 break;
675 case XFmode:
676 libcall = extendsfxf2_libfunc;
677 break;
679 case TFmode:
680 libcall = extendsftf2_libfunc;
681 break;
683 default:
684 break;
686 break;
688 case DFmode:
689 switch (to_mode)
691 case SFmode:
692 libcall = truncdfsf2_libfunc;
693 break;
695 case XFmode:
696 libcall = extenddfxf2_libfunc;
697 break;
699 case TFmode:
700 libcall = extenddftf2_libfunc;
701 break;
703 default:
704 break;
706 break;
708 case XFmode:
709 switch (to_mode)
711 case SFmode:
712 libcall = truncxfsf2_libfunc;
713 break;
715 case DFmode:
716 libcall = truncxfdf2_libfunc;
717 break;
719 default:
720 break;
722 break;
724 case TFmode:
725 switch (to_mode)
727 case SFmode:
728 libcall = trunctfsf2_libfunc;
729 break;
731 case DFmode:
732 libcall = trunctfdf2_libfunc;
733 break;
735 default:
736 break;
738 break;
740 default:
741 break;
744 if (libcall == (rtx) 0)
745 /* This conversion is not implemented yet. */
746 abort ();
748 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
749 1, from, from_mode);
750 emit_move_insn (to, value);
751 return;
754 /* Now both modes are integers. */
756 /* Handle expanding beyond a word. */
757 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
758 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
760 rtx insns;
761 rtx lowpart;
762 rtx fill_value;
763 rtx lowfrom;
764 int i;
765 enum machine_mode lowpart_mode;
766 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
768 /* Try converting directly if the insn is supported. */
769 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
770 != CODE_FOR_nothing)
772 /* If FROM is a SUBREG, put it into a register. Do this
773 so that we always generate the same set of insns for
774 better cse'ing; if an intermediate assignment occurred,
775 we won't be doing the operation directly on the SUBREG. */
776 if (optimize > 0 && GET_CODE (from) == SUBREG)
777 from = force_reg (from_mode, from);
778 emit_unop_insn (code, to, from, equiv_code);
779 return;
781 /* Next, try converting via full word. */
782 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
783 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
784 != CODE_FOR_nothing))
786 if (GET_CODE (to) == REG)
787 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
788 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
789 emit_unop_insn (code, to,
790 gen_lowpart (word_mode, to), equiv_code);
791 return;
794 /* No special multiword conversion insn; do it by hand. */
795 start_sequence ();
797 /* Since we will turn this into a no conflict block, we must ensure
798 that the source does not overlap the target. */
800 if (reg_overlap_mentioned_p (to, from))
801 from = force_reg (from_mode, from);
803 /* Get a copy of FROM widened to a word, if necessary. */
804 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
805 lowpart_mode = word_mode;
806 else
807 lowpart_mode = from_mode;
809 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
811 lowpart = gen_lowpart (lowpart_mode, to);
812 emit_move_insn (lowpart, lowfrom);
814 /* Compute the value to put in each remaining word. */
815 if (unsignedp)
816 fill_value = const0_rtx;
817 else
819 #ifdef HAVE_slt
820 if (HAVE_slt
821 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
822 && STORE_FLAG_VALUE == -1)
824 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
825 lowpart_mode, 0, 0);
826 fill_value = gen_reg_rtx (word_mode);
827 emit_insn (gen_slt (fill_value));
829 else
830 #endif
832 fill_value
833 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
834 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
835 NULL_RTX, 0);
836 fill_value = convert_to_mode (word_mode, fill_value, 1);
840 /* Fill the remaining words. */
841 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
843 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
844 rtx subword = operand_subword (to, index, 1, to_mode);
846 if (subword == 0)
847 abort ();
849 if (fill_value != subword)
850 emit_move_insn (subword, fill_value);
853 insns = get_insns ();
854 end_sequence ();
856 emit_no_conflict_block (insns, to, from, NULL_RTX,
857 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
858 return;
861 /* Truncating multi-word to a word or less. */
862 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
863 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
865 if (!((GET_CODE (from) == MEM
866 && ! MEM_VOLATILE_P (from)
867 && direct_load[(int) to_mode]
868 && ! mode_dependent_address_p (XEXP (from, 0)))
869 || GET_CODE (from) == REG
870 || GET_CODE (from) == SUBREG))
871 from = force_reg (from_mode, from);
872 convert_move (to, gen_lowpart (word_mode, from), 0);
873 return;
876 /* Handle pointer conversion */ /* SPEE 900220 */
877 if (to_mode == PQImode)
879 if (from_mode != QImode)
880 from = convert_to_mode (QImode, from, unsignedp);
882 #ifdef HAVE_truncqipqi2
883 if (HAVE_truncqipqi2)
885 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
886 return;
888 #endif /* HAVE_truncqipqi2 */
889 abort ();
892 if (from_mode == PQImode)
894 if (to_mode != QImode)
896 from = convert_to_mode (QImode, from, unsignedp);
897 from_mode = QImode;
899 else
901 #ifdef HAVE_extendpqiqi2
902 if (HAVE_extendpqiqi2)
904 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
905 return;
907 #endif /* HAVE_extendpqiqi2 */
908 abort ();
912 if (to_mode == PSImode)
914 if (from_mode != SImode)
915 from = convert_to_mode (SImode, from, unsignedp);
917 #ifdef HAVE_truncsipsi2
918 if (HAVE_truncsipsi2)
920 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
921 return;
923 #endif /* HAVE_truncsipsi2 */
924 abort ();
927 if (from_mode == PSImode)
929 if (to_mode != SImode)
931 from = convert_to_mode (SImode, from, unsignedp);
932 from_mode = SImode;
934 else
936 #ifdef HAVE_extendpsisi2
937 if (HAVE_extendpsisi2)
939 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
940 return;
942 #endif /* HAVE_extendpsisi2 */
943 abort ();
947 if (to_mode == PDImode)
949 if (from_mode != DImode)
950 from = convert_to_mode (DImode, from, unsignedp);
952 #ifdef HAVE_truncdipdi2
953 if (HAVE_truncdipdi2)
955 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
956 return;
958 #endif /* HAVE_truncdipdi2 */
959 abort ();
962 if (from_mode == PDImode)
964 if (to_mode != DImode)
966 from = convert_to_mode (DImode, from, unsignedp);
967 from_mode = DImode;
969 else
971 #ifdef HAVE_extendpdidi2
972 if (HAVE_extendpdidi2)
974 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
975 return;
977 #endif /* HAVE_extendpdidi2 */
978 abort ();
982 /* Now follow all the conversions between integers
983 no more than a word long. */
985 /* For truncation, usually we can just refer to FROM in a narrower mode. */
986 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
987 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
988 GET_MODE_BITSIZE (from_mode)))
990 if (!((GET_CODE (from) == MEM
991 && ! MEM_VOLATILE_P (from)
992 && direct_load[(int) to_mode]
993 && ! mode_dependent_address_p (XEXP (from, 0)))
994 || GET_CODE (from) == REG
995 || GET_CODE (from) == SUBREG))
996 from = force_reg (from_mode, from);
997 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
998 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
999 from = copy_to_reg (from);
1000 emit_move_insn (to, gen_lowpart (to_mode, from));
1001 return;
1004 /* Handle extension. */
1005 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1007 /* Convert directly if that works. */
1008 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1009 != CODE_FOR_nothing)
1011 emit_unop_insn (code, to, from, equiv_code);
1012 return;
1014 else
1016 enum machine_mode intermediate;
1017 rtx tmp;
1018 tree shift_amount;
1020 /* Search for a mode to convert via. */
1021 for (intermediate = from_mode; intermediate != VOIDmode;
1022 intermediate = GET_MODE_WIDER_MODE (intermediate))
1023 if (((can_extend_p (to_mode, intermediate, unsignedp)
1024 != CODE_FOR_nothing)
1025 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1026 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1027 GET_MODE_BITSIZE (intermediate))))
1028 && (can_extend_p (intermediate, from_mode, unsignedp)
1029 != CODE_FOR_nothing))
1031 convert_move (to, convert_to_mode (intermediate, from,
1032 unsignedp), unsignedp);
1033 return;
1036 /* No suitable intermediate mode.
1037 Generate what we need with shifts. */
1038 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1039 - GET_MODE_BITSIZE (from_mode), 0);
1040 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1041 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1042 to, unsignedp);
1043 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1044 to, unsignedp);
1045 if (tmp != to)
1046 emit_move_insn (to, tmp);
1047 return;
1051 /* Support special truncate insns for certain modes. */
1053 if (from_mode == DImode && to_mode == SImode)
1055 #ifdef HAVE_truncdisi2
1056 if (HAVE_truncdisi2)
1058 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1059 return;
1061 #endif
1062 convert_move (to, force_reg (from_mode, from), unsignedp);
1063 return;
1066 if (from_mode == DImode && to_mode == HImode)
1068 #ifdef HAVE_truncdihi2
1069 if (HAVE_truncdihi2)
1071 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1072 return;
1074 #endif
1075 convert_move (to, force_reg (from_mode, from), unsignedp);
1076 return;
1079 if (from_mode == DImode && to_mode == QImode)
1081 #ifdef HAVE_truncdiqi2
1082 if (HAVE_truncdiqi2)
1084 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1085 return;
1087 #endif
1088 convert_move (to, force_reg (from_mode, from), unsignedp);
1089 return;
1092 if (from_mode == SImode && to_mode == HImode)
1094 #ifdef HAVE_truncsihi2
1095 if (HAVE_truncsihi2)
1097 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1098 return;
1100 #endif
1101 convert_move (to, force_reg (from_mode, from), unsignedp);
1102 return;
1105 if (from_mode == SImode && to_mode == QImode)
1107 #ifdef HAVE_truncsiqi2
1108 if (HAVE_truncsiqi2)
1110 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1111 return;
1113 #endif
1114 convert_move (to, force_reg (from_mode, from), unsignedp);
1115 return;
1118 if (from_mode == HImode && to_mode == QImode)
1120 #ifdef HAVE_trunchiqi2
1121 if (HAVE_trunchiqi2)
1123 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1124 return;
1126 #endif
1127 convert_move (to, force_reg (from_mode, from), unsignedp);
1128 return;
1131 if (from_mode == TImode && to_mode == DImode)
1133 #ifdef HAVE_trunctidi2
1134 if (HAVE_trunctidi2)
1136 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1137 return;
1139 #endif
1140 convert_move (to, force_reg (from_mode, from), unsignedp);
1141 return;
1144 if (from_mode == TImode && to_mode == SImode)
1146 #ifdef HAVE_trunctisi2
1147 if (HAVE_trunctisi2)
1149 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1150 return;
1152 #endif
1153 convert_move (to, force_reg (from_mode, from), unsignedp);
1154 return;
1157 if (from_mode == TImode && to_mode == HImode)
1159 #ifdef HAVE_trunctihi2
1160 if (HAVE_trunctihi2)
1162 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1163 return;
1165 #endif
1166 convert_move (to, force_reg (from_mode, from), unsignedp);
1167 return;
1170 if (from_mode == TImode && to_mode == QImode)
1172 #ifdef HAVE_trunctiqi2
1173 if (HAVE_trunctiqi2)
1175 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1176 return;
1178 #endif
1179 convert_move (to, force_reg (from_mode, from), unsignedp);
1180 return;
1183 /* Handle truncation of volatile memrefs, and so on;
1184 the things that couldn't be truncated directly,
1185 and for which there was no special instruction. */
1186 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1188 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1189 emit_move_insn (to, temp);
1190 return;
1193 /* Mode combination is not recognized. */
1194 abort ();
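/* Editor's note (illustrative, not part of the original file): a minimal use
   of the conversion routines above.  Widening a QImode value into an SImode
   register with sign extension might look like the following; convert_to_mode
   either refers to a part of the operand in place or makes a fresh pseudo and
   calls convert_move.

     rtx byte = gen_reg_rtx (QImode);
     rtx word = convert_to_mode (SImode, byte, 0);   unsignedp == 0 => sign extend
*/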
1197 /* Return an rtx for a value that would result
1198 from converting X to mode MODE.
1199 Both X and MODE may be floating, or both integer.
1200 UNSIGNEDP is nonzero if X is an unsigned value.
1201 This can be done by referring to a part of X in place
1202 or by copying to a new temporary with conversion.
1204 This function *must not* call protect_from_queue
1205 except when putting X into an insn (in which case convert_move does it). */
1208 convert_to_mode (mode, x, unsignedp)
1209 enum machine_mode mode;
1210 rtx x;
1211 int unsignedp;
1213 return convert_modes (mode, VOIDmode, x, unsignedp);
1216 /* Return an rtx for a value that would result
1217 from converting X from mode OLDMODE to mode MODE.
1218 Both modes may be floating, or both integer.
1219 UNSIGNEDP is nonzero if X is an unsigned value.
1221 This can be done by referring to a part of X in place
1222 or by copying to a new temporary with conversion.
1224 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1226 This function *must not* call protect_from_queue
1227 except when putting X into an insn (in which case convert_move does it). */
1230 convert_modes (mode, oldmode, x, unsignedp)
1231 enum machine_mode mode, oldmode;
1232 rtx x;
1233 int unsignedp;
1235 register rtx temp;
1237 /* If FROM is a SUBREG that indicates that we have already done at least
1238 the required extension, strip it. */
1240 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1241 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1242 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1243 x = gen_lowpart (mode, x);
1245 if (GET_MODE (x) != VOIDmode)
1246 oldmode = GET_MODE (x);
1248 if (mode == oldmode)
1249 return x;
1251 /* There is one case that we must handle specially: If we are converting
1252 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1253 we are to interpret the constant as unsigned, gen_lowpart will do
1254 the wrong thing if the constant appears negative. What we want to do is
1255 make the high-order word of the constant zero, not all ones. */
1257 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1258 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1259 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1261 HOST_WIDE_INT val = INTVAL (x);
1263 if (oldmode != VOIDmode
1264 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1266 int width = GET_MODE_BITSIZE (oldmode);
1268 /* We need to zero extend VAL. */
1269 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1272 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1275 /* We can do this with a gen_lowpart if both desired and current modes
1276 are integer, and this is either a constant integer, a register, or a
1277 non-volatile MEM. Except for the constant case where MODE is no
1278 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1280 if ((GET_CODE (x) == CONST_INT
1281 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1282 || (GET_MODE_CLASS (mode) == MODE_INT
1283 && GET_MODE_CLASS (oldmode) == MODE_INT
1284 && (GET_CODE (x) == CONST_DOUBLE
1285 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1286 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1287 && direct_load[(int) mode])
1288 || (GET_CODE (x) == REG
1289 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1290 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1292 /* ?? If we don't know OLDMODE, we have to assume here that
1293 X does not need sign- or zero-extension. This may not be
1294 the case, but it's the best we can do. */
1295 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1296 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1298 HOST_WIDE_INT val = INTVAL (x);
1299 int width = GET_MODE_BITSIZE (oldmode);
1301 /* We must sign or zero-extend in this case. Start by
1302 zero-extending, then sign extend if we need to. */
1303 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1304 if (! unsignedp
1305 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1306 val |= (HOST_WIDE_INT) (-1) << width;
1308 return GEN_INT (val);
1311 return gen_lowpart (mode, x);
1314 temp = gen_reg_rtx (mode);
1315 convert_move (temp, x, unsignedp);
1316 return temp;
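/* Editor's note (illustrative, not part of the original file): the CONST_INT
   special case above in concrete terms.  With a 32-bit HOST_WIDE_INT, a
   QImode constant whose INTVAL is -1 (the sign-extended form of 0xFF) that is
   converted to a wider mode as an unsigned value must yield 255, not -1, so
   the value is masked to the old mode's width before GEN_INT is used:

     val &= ((HOST_WIDE_INT) 1 << 8) - 1;     now val == 255
*/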
1320 /* This macro is used to determine what the largest unit size that
1321 move_by_pieces can use is. */
1323 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1324 move efficiently, as opposed to MOVE_MAX which is the maximum
1325 number of bytes we can move with a single instruction. */
1327 #ifndef MOVE_MAX_PIECES
1328 #define MOVE_MAX_PIECES MOVE_MAX
1329 #endif
1331 /* Generate several move instructions to copy LEN bytes
1332 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1333 The caller must pass FROM and TO
1334 through protect_from_queue before calling.
1335 ALIGN (in bytes) is maximum alignment we can assume. */
1337 void
1338 move_by_pieces (to, from, len, align)
1339 rtx to, from;
1340 int len, align;
1342 struct move_by_pieces data;
1343 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1344 int max_size = MOVE_MAX_PIECES + 1;
1345 enum machine_mode mode = VOIDmode, tmode;
1346 enum insn_code icode;
1348 data.offset = 0;
1349 data.to_addr = to_addr;
1350 data.from_addr = from_addr;
1351 data.to = to;
1352 data.from = from;
1353 data.autinc_to
1354 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1355 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1356 data.autinc_from
1357 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1358 || GET_CODE (from_addr) == POST_INC
1359 || GET_CODE (from_addr) == POST_DEC);
1361 data.explicit_inc_from = 0;
1362 data.explicit_inc_to = 0;
1363 data.reverse
1364 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1365 if (data.reverse) data.offset = len;
1366 data.len = len;
1368 data.to_struct = MEM_IN_STRUCT_P (to);
1369 data.from_struct = MEM_IN_STRUCT_P (from);
1370 data.to_readonly = RTX_UNCHANGING_P (to);
1371 data.from_readonly = RTX_UNCHANGING_P (from);
1373 /* If copying requires more than two move insns,
1374 copy addresses to registers (to make displacements shorter)
1375 and use post-increment if available. */
1376 if (!(data.autinc_from && data.autinc_to)
1377 && move_by_pieces_ninsns (len, align) > 2)
1379 /* Find the mode of the largest move... */
1380 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1381 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1382 if (GET_MODE_SIZE (tmode) < max_size)
1383 mode = tmode;
1385 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1387 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1388 data.autinc_from = 1;
1389 data.explicit_inc_from = -1;
1391 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1393 data.from_addr = copy_addr_to_reg (from_addr);
1394 data.autinc_from = 1;
1395 data.explicit_inc_from = 1;
1397 if (!data.autinc_from && CONSTANT_P (from_addr))
1398 data.from_addr = copy_addr_to_reg (from_addr);
1399 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1401 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1402 data.autinc_to = 1;
1403 data.explicit_inc_to = -1;
1405 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1407 data.to_addr = copy_addr_to_reg (to_addr);
1408 data.autinc_to = 1;
1409 data.explicit_inc_to = 1;
1411 if (!data.autinc_to && CONSTANT_P (to_addr))
1412 data.to_addr = copy_addr_to_reg (to_addr);
1415 if (! SLOW_UNALIGNED_ACCESS
1416 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1417 align = MOVE_MAX;
1419 /* First move what we can in the largest integer mode, then go to
1420 successively smaller modes. */
1422 while (max_size > 1)
1424 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1425 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1426 if (GET_MODE_SIZE (tmode) < max_size)
1427 mode = tmode;
1429 if (mode == VOIDmode)
1430 break;
1432 icode = mov_optab->handlers[(int) mode].insn_code;
1433 if (icode != CODE_FOR_nothing
1434 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1435 GET_MODE_SIZE (mode)))
1436 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1438 max_size = GET_MODE_SIZE (mode);
1441 /* The code above should have handled everything. */
1442 if (data.len > 0)
1443 abort ();
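/* Editor's sketch (assumption, not part of the original file): the shape of
   the copy move_by_pieces produces.  For len == 7 on a 32-bit target with
   4-byte alignment it emits one SImode move, then one HImode move, then one
   QImode move, walking max_size down through the integer modes:

     7 bytes  ->  SI (4) + HI (2) + QI (1)
*/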
1446 /* Return number of insns required to move L bytes by pieces.
1447 ALIGN (in bytes) is maximum alignment we can assume. */
1449 static int
1450 move_by_pieces_ninsns (l, align)
1451 unsigned int l;
1452 int align;
1454 register int n_insns = 0;
1455 int max_size = MOVE_MAX + 1;
1457 if (! SLOW_UNALIGNED_ACCESS
1458 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1459 align = MOVE_MAX;
1461 while (max_size > 1)
1463 enum machine_mode mode = VOIDmode, tmode;
1464 enum insn_code icode;
1466 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1467 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1468 if (GET_MODE_SIZE (tmode) < max_size)
1469 mode = tmode;
1471 if (mode == VOIDmode)
1472 break;
1474 icode = mov_optab->handlers[(int) mode].insn_code;
1475 if (icode != CODE_FOR_nothing
1476 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1477 GET_MODE_SIZE (mode)))
1478 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1480 max_size = GET_MODE_SIZE (mode);
1483 return n_insns;
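/* Editor's note (illustrative, not part of the original file): for the same
   7-byte, 4-byte-aligned copy, move_by_pieces_ninsns counts 7/4 = 1 SImode
   insn with 3 bytes left, then 3/2 = 1 HImode insn with 1 byte left, then one
   QImode insn, for a total of 3; MOVE_BY_PIECES_P compares that 3 against
   MOVE_RATIO.
*/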
1486 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1487 with move instructions for mode MODE. GENFUN is the gen_... function
1488 to make a move insn for that mode. DATA has all the other info. */
1490 static void
1491 move_by_pieces_1 (genfun, mode, data)
1492 rtx (*genfun) PROTO ((rtx, ...));
1493 enum machine_mode mode;
1494 struct move_by_pieces *data;
1496 register int size = GET_MODE_SIZE (mode);
1497 register rtx to1, from1;
1499 while (data->len >= size)
1501 if (data->reverse) data->offset -= size;
1503 to1 = (data->autinc_to
1504 ? gen_rtx_MEM (mode, data->to_addr)
1505 : copy_rtx (change_address (data->to, mode,
1506 plus_constant (data->to_addr,
1507 data->offset))));
1508 MEM_IN_STRUCT_P (to1) = data->to_struct;
1509 RTX_UNCHANGING_P (to1) = data->to_readonly;
1511 from1
1512 = (data->autinc_from
1513 ? gen_rtx_MEM (mode, data->from_addr)
1514 : copy_rtx (change_address (data->from, mode,
1515 plus_constant (data->from_addr,
1516 data->offset))));
1517 MEM_IN_STRUCT_P (from1) = data->from_struct;
1518 RTX_UNCHANGING_P (from1) = data->from_readonly;
1520 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1521 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1522 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1523 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1525 emit_insn ((*genfun) (to1, from1));
1526 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1527 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1528 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1529 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1531 if (! data->reverse) data->offset += size;
1533 data->len -= size;
1537 /* Emit code to move a block Y to a block X.
1538 This may be done with string-move instructions,
1539 with multiple scalar move instructions, or with a library call.
1541 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1542 with mode BLKmode.
1543 SIZE is an rtx that says how long they are.
1544 ALIGN is the maximum alignment we can assume they have,
1545 measured in bytes.
1547 Return the address of the new block, if memcpy is called and returns it,
1548 0 otherwise. */
1551 emit_block_move (x, y, size, align)
1552 rtx x, y;
1553 rtx size;
1554 int align;
1556 rtx retval = 0;
1557 #ifdef TARGET_MEM_FUNCTIONS
1558 static tree fn;
1559 tree call_expr, arg_list;
1560 #endif
1562 if (GET_MODE (x) != BLKmode)
1563 abort ();
1565 if (GET_MODE (y) != BLKmode)
1566 abort ();
1568 x = protect_from_queue (x, 1);
1569 y = protect_from_queue (y, 0);
1570 size = protect_from_queue (size, 0);
1572 if (GET_CODE (x) != MEM)
1573 abort ();
1574 if (GET_CODE (y) != MEM)
1575 abort ();
1576 if (size == 0)
1577 abort ();
1579 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1580 move_by_pieces (x, y, INTVAL (size), align);
1581 else
1583 /* Try the most limited insn first, because there's no point
1584 including more than one in the machine description unless
1585 the more limited one has some advantage. */
1587 rtx opalign = GEN_INT (align);
1588 enum machine_mode mode;
1590 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1591 mode = GET_MODE_WIDER_MODE (mode))
1593 enum insn_code code = movstr_optab[(int) mode];
1594 insn_operand_predicate_fn pred;
1596 if (code != CODE_FOR_nothing
1597 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1598 here because if SIZE is less than the mode mask, as it is
1599 returned by the macro, it will definitely be less than the
1600 actual mode mask. */
1601 && ((GET_CODE (size) == CONST_INT
1602 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1603 <= (GET_MODE_MASK (mode) >> 1)))
1604 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1605 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1606 || (*pred) (x, BLKmode))
1607 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1608 || (*pred) (y, BLKmode))
1609 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1610 || (*pred) (opalign, VOIDmode)))
1612 rtx op2;
1613 rtx last = get_last_insn ();
1614 rtx pat;
1616 op2 = convert_to_mode (mode, size, 1);
1617 pred = insn_data[(int) code].operand[2].predicate;
1618 if (pred != 0 && ! (*pred) (op2, mode))
1619 op2 = copy_to_mode_reg (mode, op2);
1621 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1622 if (pat)
1624 emit_insn (pat);
1625 return 0;
1627 else
1628 delete_insns_since (last);
1632 /* X, Y, or SIZE may have been passed through protect_from_queue.
1634 It is unsafe to save the value generated by protect_from_queue
1635 and reuse it later. Consider what happens if emit_queue is
1636 called before the return value from protect_from_queue is used.
1638 Expansion of the CALL_EXPR below will call emit_queue before
1639 we are finished emitting RTL for argument setup. So if we are
1640 not careful we could get the wrong value for an argument.
1642 To avoid this problem we go ahead and emit code to copy X, Y &
1643 SIZE into new pseudos. We can then place those new pseudos
1644 into an RTL_EXPR and use them later, even after a call to
1645 emit_queue.
1647 Note this is not strictly needed for library calls since they
1648 do not call emit_queue before loading their arguments. However,
1649 we may need to have library calls call emit_queue in the future
1650 since failing to do so could cause problems for targets which
1651 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1652 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1653 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1655 #ifdef TARGET_MEM_FUNCTIONS
1656 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1657 #else
1658 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1659 TREE_UNSIGNED (integer_type_node));
1660 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1661 #endif
1663 #ifdef TARGET_MEM_FUNCTIONS
1664 /* It is incorrect to use the libcall calling conventions to call
1665 memcpy in this context.
1667 This could be a user call to memcpy and the user may wish to
1668 examine the return value from memcpy.
1670 For targets where libcalls and normal calls have different conventions
1671 for returning pointers, we could end up generating incorrect code.
1673 So instead of using a libcall sequence we build up a suitable
1674 CALL_EXPR and expand the call in the normal fashion. */
1675 if (fn == NULL_TREE)
1677 tree fntype;
1679 /* This was copied from except.c; I don't know if all of this is
1680 necessary in this context or not. */
1681 fn = get_identifier ("memcpy");
1682 push_obstacks_nochange ();
1683 end_temporary_allocation ();
1684 fntype = build_pointer_type (void_type_node);
1685 fntype = build_function_type (fntype, NULL_TREE);
1686 fn = build_decl (FUNCTION_DECL, fn, fntype);
1687 ggc_add_tree_root (&fn, 1);
1688 DECL_EXTERNAL (fn) = 1;
1689 TREE_PUBLIC (fn) = 1;
1690 DECL_ARTIFICIAL (fn) = 1;
1691 make_decl_rtl (fn, NULL_PTR, 1);
1692 assemble_external (fn);
1693 pop_obstacks ();
1696 /* We need to make an argument list for the function call.
1698 memcpy has three arguments, the first two are void * addresses and
1699 the last is a size_t byte count for the copy. */
1700 arg_list
1701 = build_tree_list (NULL_TREE,
1702 make_tree (build_pointer_type (void_type_node), x));
1703 TREE_CHAIN (arg_list)
1704 = build_tree_list (NULL_TREE,
1705 make_tree (build_pointer_type (void_type_node), y));
1706 TREE_CHAIN (TREE_CHAIN (arg_list))
1707 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1708 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1710 /* Now we have to build up the CALL_EXPR itself. */
1711 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1712 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1713 call_expr, arg_list, NULL_TREE);
1714 TREE_SIDE_EFFECTS (call_expr) = 1;
1716 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1717 #else
1718 emit_library_call (bcopy_libfunc, 0,
1719 VOIDmode, 3, y, Pmode, x, Pmode,
1720 convert_to_mode (TYPE_MODE (integer_type_node), size,
1721 TREE_UNSIGNED (integer_type_node)),
1722 TYPE_MODE (integer_type_node));
1723 #endif
1726 return retval;
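/* Editor's sketch (assumption, not part of the original file): the fallback
   order emit_block_move uses for a BLKmode copy, in pseudo-form.

     if (size is a CONST_INT and MOVE_BY_PIECES_P (size, align))
       move_by_pieces (x, y, size, align);
     else if (some movstrM pattern accepts x, y, size, align)
       emit that pattern;
     else
       call memcpy (with TARGET_MEM_FUNCTIONS) or bcopy via a libcall;
*/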
1729 /* Copy all or part of a value X into registers starting at REGNO.
1730 The number of registers to be filled is NREGS. */
1732 void
1733 move_block_to_reg (regno, x, nregs, mode)
1734 int regno;
1735 rtx x;
1736 int nregs;
1737 enum machine_mode mode;
1739 int i;
1740 #ifdef HAVE_load_multiple
1741 rtx pat;
1742 rtx last;
1743 #endif
1745 if (nregs == 0)
1746 return;
1748 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1749 x = validize_mem (force_const_mem (mode, x));
1751 /* See if the machine can do this with a load multiple insn. */
1752 #ifdef HAVE_load_multiple
1753 if (HAVE_load_multiple)
1755 last = get_last_insn ();
1756 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1757 GEN_INT (nregs));
1758 if (pat)
1760 emit_insn (pat);
1761 return;
1763 else
1764 delete_insns_since (last);
1766 #endif
1768 for (i = 0; i < nregs; i++)
1769 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1770 operand_subword_force (x, i, mode));
1773 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1774 The number of registers to be filled is NREGS. SIZE indicates the number
1775 of bytes in the object X. */
1778 void
1779 move_block_from_reg (regno, x, nregs, size)
1780 int regno;
1781 rtx x;
1782 int nregs;
1783 int size;
1785 int i;
1786 #ifdef HAVE_store_multiple
1787 rtx pat;
1788 rtx last;
1789 #endif
1790 enum machine_mode mode;
1792 /* If SIZE is that of a mode no bigger than a word, just use that
1793 mode's store operation. */
1794 if (size <= UNITS_PER_WORD
1795 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1797 emit_move_insn (change_address (x, mode, NULL),
1798 gen_rtx_REG (mode, regno));
1799 return;
1802 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1803 to the left before storing to memory. Note that the previous test
1804 doesn't handle all cases (e.g. SIZE == 3). */
1805 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1807 rtx tem = operand_subword (x, 0, 1, BLKmode);
1808 rtx shift;
1810 if (tem == 0)
1811 abort ();
1813 shift = expand_shift (LSHIFT_EXPR, word_mode,
1814 gen_rtx_REG (word_mode, regno),
1815 build_int_2 ((UNITS_PER_WORD - size)
1816 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1817 emit_move_insn (tem, shift);
1818 return;
1821 /* See if the machine can do this with a store multiple insn. */
1822 #ifdef HAVE_store_multiple
1823 if (HAVE_store_multiple)
1825 last = get_last_insn ();
1826 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1827 GEN_INT (nregs));
1828 if (pat)
1830 emit_insn (pat);
1831 return;
1833 else
1834 delete_insns_since (last);
1836 #endif
1838 for (i = 0; i < nregs; i++)
1840 rtx tem = operand_subword (x, i, 1, BLKmode);
1842 if (tem == 0)
1843 abort ();
1845 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1849 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1850 registers represented by a PARALLEL. SSIZE represents the total size of
1851 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1852 SRC in bits. */
1853 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1854 the balance will be in what would be the low-order memory addresses, i.e.
1855 left justified for big endian, right justified for little endian. This
1856 happens to be true for the targets currently using this support. If this
1857 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1858 would be needed. */
1860 void
1861 emit_group_load (dst, orig_src, ssize, align)
1862 rtx dst, orig_src;
1863 int align, ssize;
1865 rtx *tmps, src;
1866 int start, i;
1868 if (GET_CODE (dst) != PARALLEL)
1869 abort ();
1871 /* Check for a NULL entry, used to indicate that the parameter goes
1872 both on the stack and in registers. */
1873 if (XEXP (XVECEXP (dst, 0, 0), 0))
1874 start = 0;
1875 else
1876 start = 1;
1878 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1880 /* If we won't be loading directly from memory, protect the real source
1881 from strange tricks we might play. */
1882 src = orig_src;
1883 if (GET_CODE (src) != MEM)
1885 src = gen_reg_rtx (GET_MODE (orig_src));
1886 emit_move_insn (src, orig_src);
1889 /* Process the pieces. */
1890 for (i = start; i < XVECLEN (dst, 0); i++)
1892 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1893 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1894 int bytelen = GET_MODE_SIZE (mode);
1895 int shift = 0;
1897 /* Handle trailing fragments that run over the size of the struct. */
1898 if (ssize >= 0 && bytepos + bytelen > ssize)
1900 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1901 bytelen = ssize - bytepos;
1902 if (bytelen <= 0)
1903 abort();
1906 /* Optimize the access just a bit. */
1907 if (GET_CODE (src) == MEM
1908 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1909 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1910 && bytelen == GET_MODE_SIZE (mode))
1912 tmps[i] = gen_reg_rtx (mode);
1913 emit_move_insn (tmps[i],
1914 change_address (src, mode,
1915 plus_constant (XEXP (src, 0),
1916 bytepos)));
1918 else if (GET_CODE (src) == CONCAT)
1920 if (bytepos == 0
1921 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1922 tmps[i] = XEXP (src, 0);
1923 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1924 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1925 tmps[i] = XEXP (src, 1);
1926 else
1927 abort ();
1929 else
1931 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1932 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1933 mode, mode, align, ssize);
1936 if (BYTES_BIG_ENDIAN && shift)
1938 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1939 tmps[i], 0, OPTAB_WIDEN);
1942 emit_queue();
1944 /* Copy the extracted pieces into the proper (probable) hard regs. */
1945 for (i = start; i < XVECLEN (dst, 0); i++)
1946 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
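/* Editor's note (illustrative, not part of the original file): the kind of
   PARALLEL that emit_group_load consumes.  A structure returned half in a
   general register and half in a floating register might be described as

     (parallel [(expr_list (reg:SI 3)  (const_int 0))
                (expr_list (reg:DF 32) (const_int 4))])

   where each const_int gives the byte offset of that piece within the
   original block.
*/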
1949 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1950 registers represented by a PARALLEL. SSIZE represents the total size of
1951 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1953 void
1954 emit_group_store (orig_dst, src, ssize, align)
1955 rtx orig_dst, src;
1956 int ssize, align;
1958 rtx *tmps, dst;
1959 int start, i;
1961 if (GET_CODE (src) != PARALLEL)
1962 abort ();
1964 /* Check for a NULL entry, used to indicate that the parameter goes
1965 both on the stack and in registers. */
1966 if (XEXP (XVECEXP (src, 0, 0), 0))
1967 start = 0;
1968 else
1969 start = 1;
1971 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
1973 /* Copy the (probable) hard regs into pseudos. */
1974 for (i = start; i < XVECLEN (src, 0); i++)
1976 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1977 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1978 emit_move_insn (tmps[i], reg);
1980 emit_queue();
1982 /* If we won't be storing directly into memory, protect the real destination
1983 from strange tricks we might play. */
1984 dst = orig_dst;
1985 if (GET_CODE (dst) == PARALLEL)
1987 rtx temp;
1989 /* We can get a PARALLEL dst if there is a conditional expression in
1990 a return statement. In that case, the dst and src are the same,
1991 so no action is necessary. */
1992 if (rtx_equal_p (dst, src))
1993 return;
1995 /* It is unclear if we can ever reach here, but we may as well handle
1996 it. Allocate a temporary, and split this into a store/load to/from
1997 the temporary. */
1999 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2000 emit_group_store (temp, src, ssize, align);
2001 emit_group_load (dst, temp, ssize, align);
2002 return;
2004 else if (GET_CODE (dst) != MEM)
2006 dst = gen_reg_rtx (GET_MODE (orig_dst));
2007 /* Make life a bit easier for combine. */
2008 emit_move_insn (dst, const0_rtx);
2010 else if (! MEM_IN_STRUCT_P (dst))
2012 /* store_bit_field requires that memory operations have
2013 mem_in_struct_p set; we might not. */
2015 dst = copy_rtx (orig_dst);
2016 MEM_SET_IN_STRUCT_P (dst, 1);
2019 /* Process the pieces. */
2020 for (i = start; i < XVECLEN (src, 0); i++)
2022 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2023 enum machine_mode mode = GET_MODE (tmps[i]);
2024 int bytelen = GET_MODE_SIZE (mode);
2026 /* Handle trailing fragments that run over the size of the struct. */
2027 if (ssize >= 0 && bytepos + bytelen > ssize)
2029 if (BYTES_BIG_ENDIAN)
2031 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2032 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2033 tmps[i], 0, OPTAB_WIDEN);
2035 bytelen = ssize - bytepos;
2038 /* Optimize the access just a bit. */
2039 if (GET_CODE (dst) == MEM
2040 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2041 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2042 && bytelen == GET_MODE_SIZE (mode))
2044 emit_move_insn (change_address (dst, mode,
2045 plus_constant (XEXP (dst, 0),
2046 bytepos)),
2047 tmps[i]);
2049 else
2051 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2052 mode, tmps[i], align, ssize);
2055 emit_queue();
2057 /* Copy from the pseudo into the (probable) hard reg. */
2058 if (GET_CODE (dst) == REG)
2059 emit_move_insn (orig_dst, dst);
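/* Illustrative sketch (continuing the hypothetical layout sketched for
   emit_group_load above): the inverse operation spills such a PARALLEL
   back to memory, e.g.

     rtx slot = assign_stack_temp (BLKmode, 16, 0);
     emit_group_store (slot, regs, 16, 8);

   where REGS is the PARALLEL of (reg, byte-offset) pairs and 16 and 8
   are the block's size and alignment in bytes.  */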
2062 /* Generate code to copy a BLKmode object of TYPE out of a
2063 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2064 is null, a stack temporary is created. TGTBLK is returned.
2066 The primary purpose of this routine is to handle functions
2067 that return BLKmode structures in registers. Some machines
2068 (the PA for example) want to return all small structures
2069 in registers regardless of the structure's alignment.
2073 copy_blkmode_from_reg (tgtblk, srcreg, type)
2074 rtx tgtblk;
2075 rtx srcreg;
2076 tree type;
2078 int bytes = int_size_in_bytes (type);
2079 rtx src = NULL, dst = NULL;
2080 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2081 int bitpos, xbitpos, big_endian_correction = 0;
2083 if (tgtblk == 0)
2085 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2086 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2087 preserve_temp_slots (tgtblk);
2090 /* This code assumes srcreg is at least a full word. If it isn't,
2091 copy it into a new pseudo which is a full word. */
2092 if (GET_MODE (srcreg) != BLKmode
2093 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2094 srcreg = convert_to_mode (word_mode, srcreg,
2095 TREE_UNSIGNED (type));
2097 /* Structures whose size is not a multiple of a word are aligned
2098 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2099 machine, this means we must skip the empty high order bytes when
2100 calculating the bit offset. */
2101 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2102 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2103 * BITS_PER_UNIT));
2105 /* Copy the structure BITSIZE bits at a time.
2107 We could probably emit more efficient code for machines
2108 which do not use strict alignment, but it doesn't seem
2109 worth the effort at the current time. */
2110 for (bitpos = 0, xbitpos = big_endian_correction;
2111 bitpos < bytes * BITS_PER_UNIT;
2112 bitpos += bitsize, xbitpos += bitsize)
2115 /* We need a new source operand each time xbitpos is on a
2116 word boundary and when xbitpos == big_endian_correction
2117 (the first time through). */
2118 if (xbitpos % BITS_PER_WORD == 0
2119 || xbitpos == big_endian_correction)
2120 src = operand_subword_force (srcreg,
2121 xbitpos / BITS_PER_WORD,
2122 BLKmode);
2124 /* We need a new destination operand each time bitpos is on
2125 a word boundary. */
2126 if (bitpos % BITS_PER_WORD == 0)
2127 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2129 /* Use xbitpos for the source extraction (right justified) and
2130 bitpos for the destination store (left justified). */
2131 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2132 extract_bit_field (src, bitsize,
2133 xbitpos % BITS_PER_WORD, 1,
2134 NULL_RTX, word_mode,
2135 word_mode,
2136 bitsize / BITS_PER_UNIT,
2137 BITS_PER_WORD),
2138 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2140 return tgtblk;
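/* Illustrative sketch (hypothetical names): after expanding a call whose
   BLKmode result is left in the return register, a caller could spill it
   with

     rtx ret = gen_rtx_REG (word_mode, RET_REGNO);
     rtx blk = copy_blkmode_from_reg (NULL_RTX, ret, result_type);

   Passing a null TGTBLK makes copy_blkmode_from_reg allocate the stack
   temporary itself.  RET_REGNO and result_type are placeholders for the
   target's return register number and the call's result type.  */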
2144 /* Add a USE expression for REG to the (possibly empty) list pointed
2145 to by CALL_FUSAGE. REG must denote a hard register. */
2147 void
2148 use_reg (call_fusage, reg)
2149 rtx *call_fusage, reg;
2151 if (GET_CODE (reg) != REG
2152 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2153 abort();
2155 *call_fusage
2156 = gen_rtx_EXPR_LIST (VOIDmode,
2157 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2160 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2161 starting at REGNO. All of these registers must be hard registers. */
2163 void
2164 use_regs (call_fusage, regno, nregs)
2165 rtx *call_fusage;
2166 int regno;
2167 int nregs;
2169 int i;
2171 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2172 abort ();
2174 for (i = 0; i < nregs; i++)
2175 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2178 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2179 PARALLEL REGS. This is for calls that pass values in multiple
2180 non-contiguous locations. The Irix 6 ABI has examples of this. */
2182 void
2183 use_group_regs (call_fusage, regs)
2184 rtx *call_fusage;
2185 rtx regs;
2187 int i;
2189 for (i = 0; i < XVECLEN (regs, 0); i++)
2191 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2193 /* A NULL entry means the parameter goes both on the stack and in
2194 registers. This can also be a MEM for targets that pass values
2195 partially on the stack and partially in registers. */
2196 if (reg != 0 && GET_CODE (reg) == REG)
2197 use_reg (call_fusage, reg);
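/* Illustrative sketch (made-up register numbers): a caller that passes an
   argument in hard registers 0 and 1 records that usage with

     rtx call_fusage = 0;
     use_regs (&call_fusage, 0, 2);

   and later hangs CALL_FUSAGE off the emitted call insn via
   CALL_INSN_FUNCTION_USAGE, so flow analysis knows those registers are
   live at the call.  */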
2201 /* Generate several move instructions to clear LEN bytes of block TO.
2202 (A MEM rtx with BLKmode). The caller must pass TO through
2203 protect_from_queue before calling. ALIGN (in bytes) is the maximum
2204 alignment we can assume. */
2206 static void
2207 clear_by_pieces (to, len, align)
2208 rtx to;
2209 int len, align;
2211 struct clear_by_pieces data;
2212 rtx to_addr = XEXP (to, 0);
2213 int max_size = MOVE_MAX_PIECES + 1;
2214 enum machine_mode mode = VOIDmode, tmode;
2215 enum insn_code icode;
2217 data.offset = 0;
2218 data.to_addr = to_addr;
2219 data.to = to;
2220 data.autinc_to
2221 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2222 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2224 data.explicit_inc_to = 0;
2225 data.reverse
2226 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2227 if (data.reverse) data.offset = len;
2228 data.len = len;
2230 data.to_struct = MEM_IN_STRUCT_P (to);
2232 /* If clearing requires more than two move insns,
2233 copy addresses to registers (to make displacements shorter)
2234 and use post-increment if available. */
2235 if (!data.autinc_to
2236 && move_by_pieces_ninsns (len, align) > 2)
2238 /* Determine the main mode we'll be using */
2239 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2240 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2241 if (GET_MODE_SIZE (tmode) < max_size)
2242 mode = tmode;
2244 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2246 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2247 data.autinc_to = 1;
2248 data.explicit_inc_to = -1;
2250 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2252 data.to_addr = copy_addr_to_reg (to_addr);
2253 data.autinc_to = 1;
2254 data.explicit_inc_to = 1;
2256 if (!data.autinc_to && CONSTANT_P (to_addr))
2257 data.to_addr = copy_addr_to_reg (to_addr);
2260 if (! SLOW_UNALIGNED_ACCESS
2261 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2262 align = MOVE_MAX;
2264 /* First move what we can in the largest integer mode, then go to
2265 successively smaller modes. */
2267 while (max_size > 1)
2269 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2270 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2271 if (GET_MODE_SIZE (tmode) < max_size)
2272 mode = tmode;
2274 if (mode == VOIDmode)
2275 break;
2277 icode = mov_optab->handlers[(int) mode].insn_code;
2278 if (icode != CODE_FOR_nothing
2279 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2280 GET_MODE_SIZE (mode)))
2281 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2283 max_size = GET_MODE_SIZE (mode);
2286 /* The code above should have handled everything. */
2287 if (data.len != 0)
2288 abort ();
2291 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2292 with move instructions for mode MODE. GENFUN is the gen_... function
2293 to make a move insn for that mode. DATA has all the other info. */
2295 static void
2296 clear_by_pieces_1 (genfun, mode, data)
2297 rtx (*genfun) PROTO ((rtx, ...));
2298 enum machine_mode mode;
2299 struct clear_by_pieces *data;
2301 register int size = GET_MODE_SIZE (mode);
2302 register rtx to1;
2304 while (data->len >= size)
2306 if (data->reverse) data->offset -= size;
2308 to1 = (data->autinc_to
2309 ? gen_rtx_MEM (mode, data->to_addr)
2310 : copy_rtx (change_address (data->to, mode,
2311 plus_constant (data->to_addr,
2312 data->offset))));
2313 MEM_IN_STRUCT_P (to1) = data->to_struct;
2315 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2316 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2318 emit_insn ((*genfun) (to1, const0_rtx));
2319 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2320 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2322 if (! data->reverse) data->offset += size;
2324 data->len -= size;
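/* Illustrative worked example (hypothetical 32-bit target, arbitrary
   pseudo number): clearing a 7-byte, 4-byte-aligned block with the
   routines above first uses SImode while max_size allows it, then falls
   through to HImode and QImode, emitting roughly

     (set (mem:SI (reg:SI 100)) (const_int 0))
     (set (mem:HI (plus:SI (reg:SI 100) (const_int 4))) (const_int 0))
     (set (mem:QI (plus:SI (reg:SI 100) (const_int 6))) (const_int 0))

   so DATA.LEN reaches zero and the final consistency check passes.  */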
2328 /* Write zeros through the storage of OBJECT.
2329 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2330 the maximum alignment we can assume it has, measured in bytes.
2332 If we call a function that returns the length of the block, return it. */
2335 clear_storage (object, size, align)
2336 rtx object;
2337 rtx size;
2338 int align;
2340 #ifdef TARGET_MEM_FUNCTIONS
2341 static tree fn;
2342 tree call_expr, arg_list;
2343 #endif
2344 rtx retval = 0;
2346 if (GET_MODE (object) == BLKmode)
2348 object = protect_from_queue (object, 1);
2349 size = protect_from_queue (size, 0);
2351 if (GET_CODE (size) == CONST_INT
2352 && MOVE_BY_PIECES_P (INTVAL (size), align))
2353 clear_by_pieces (object, INTVAL (size), align);
2355 else
2357 /* Try the most limited insn first, because there's no point
2358 including more than one in the machine description unless
2359 the more limited one has some advantage. */
2361 rtx opalign = GEN_INT (align);
2362 enum machine_mode mode;
2364 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2365 mode = GET_MODE_WIDER_MODE (mode))
2367 enum insn_code code = clrstr_optab[(int) mode];
2368 insn_operand_predicate_fn pred;
2370 if (code != CODE_FOR_nothing
2371 /* We don't need MODE to be narrower than
2372 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2373 the mode mask, as it is returned by the macro, it will
2374 definitely be less than the actual mode mask. */
2375 && ((GET_CODE (size) == CONST_INT
2376 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2377 <= (GET_MODE_MASK (mode) >> 1)))
2378 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2379 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2380 || (*pred) (object, BLKmode))
2381 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2382 || (*pred) (opalign, VOIDmode)))
2384 rtx op1;
2385 rtx last = get_last_insn ();
2386 rtx pat;
2388 op1 = convert_to_mode (mode, size, 1);
2389 pred = insn_data[(int) code].operand[1].predicate;
2390 if (pred != 0 && ! (*pred) (op1, mode))
2391 op1 = copy_to_mode_reg (mode, op1);
2393 pat = GEN_FCN ((int) code) (object, op1, opalign);
2394 if (pat)
2396 emit_insn (pat);
2397 return 0;
2399 else
2400 delete_insns_since (last);
2404 /* OBJECT or SIZE may have been passed through protect_from_queue.
2406 It is unsafe to save the value generated by protect_from_queue
2407 and reuse it later. Consider what happens if emit_queue is
2408 called before the return value from protect_from_queue is used.
2410 Expansion of the CALL_EXPR below will call emit_queue before
2411 we are finished emitting RTL for argument setup. So if we are
2412 not careful we could get the wrong value for an argument.
2414 To avoid this problem we go ahead and emit code to copy OBJECT
2415 and SIZE into new pseudos. We can then place those new pseudos
2416 into an RTL_EXPR and use them later, even after a call to
2417 emit_queue.
2419 Note this is not strictly needed for library calls since they
2420 do not call emit_queue before loading their arguments. However,
2421 we may need to have library calls call emit_queue in the future
2422 since failing to do so could cause problems for targets which
2423 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2424 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2426 #ifdef TARGET_MEM_FUNCTIONS
2427 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2428 #else
2429 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2430 TREE_UNSIGNED (integer_type_node));
2431 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2432 #endif
2435 #ifdef TARGET_MEM_FUNCTIONS
2436 /* It is incorrect to use the libcall calling conventions to call
2437 memset in this context.
2439 This could be a user call to memset and the user may wish to
2440 examine the return value from memset.
2442 For targets where libcalls and normal calls have different
2443 conventions for returning pointers, we could end up generating
2444 incorrect code.
2446 So instead of using a libcall sequence we build up a suitable
2447 CALL_EXPR and expand the call in the normal fashion. */
2448 if (fn == NULL_TREE)
2450 tree fntype;
2452 /* This was copied from except.c; I don't know if all of this is
2453 necessary in this context or not. */
2454 fn = get_identifier ("memset");
2455 push_obstacks_nochange ();
2456 end_temporary_allocation ();
2457 fntype = build_pointer_type (void_type_node);
2458 fntype = build_function_type (fntype, NULL_TREE);
2459 fn = build_decl (FUNCTION_DECL, fn, fntype);
2460 ggc_add_tree_root (&fn, 1);
2461 DECL_EXTERNAL (fn) = 1;
2462 TREE_PUBLIC (fn) = 1;
2463 DECL_ARTIFICIAL (fn) = 1;
2464 make_decl_rtl (fn, NULL_PTR, 1);
2465 assemble_external (fn);
2466 pop_obstacks ();
2469 /* We need to make an argument list for the function call.
2471 memset has three arguments: the first is a void * address, the
2472 second an integer with the initialization value, and the last a
2473 size_t count of bytes to set. */
2474 arg_list
2475 = build_tree_list (NULL_TREE,
2476 make_tree (build_pointer_type (void_type_node),
2477 object));
2478 TREE_CHAIN (arg_list)
2479 = build_tree_list (NULL_TREE,
2480 make_tree (integer_type_node, const0_rtx));
2481 TREE_CHAIN (TREE_CHAIN (arg_list))
2482 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2483 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2485 /* Now we have to build up the CALL_EXPR itself. */
2486 call_expr = build1 (ADDR_EXPR,
2487 build_pointer_type (TREE_TYPE (fn)), fn);
2488 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2489 call_expr, arg_list, NULL_TREE);
2490 TREE_SIDE_EFFECTS (call_expr) = 1;
2492 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2493 #else
2494 emit_library_call (bzero_libfunc, 0,
2495 VOIDmode, 2, object, Pmode, size,
2496 TYPE_MODE (integer_type_node));
2497 #endif
2500 else
2501 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2503 return retval;
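/* Illustrative sketch: zeroing a 64-byte, word-aligned BLKmode temporary
   would look like

     rtx mem = assign_stack_temp (BLKmode, 64, 0);
     clear_storage (mem, GEN_INT (64), UNITS_PER_WORD);

   Small constant sizes go through clear_by_pieces; larger or variable
   sizes use a clrstr pattern if the target provides one, else the
   memset/bzero call emitted above.  */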
2506 /* Generate code to copy Y into X.
2507 Both Y and X must have the same mode, except that
2508 Y can be a constant with VOIDmode.
2509 This mode cannot be BLKmode; use emit_block_move for that.
2511 Return the last instruction emitted. */
2514 emit_move_insn (x, y)
2515 rtx x, y;
2517 enum machine_mode mode = GET_MODE (x);
2519 x = protect_from_queue (x, 1);
2520 y = protect_from_queue (y, 0);
2522 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2523 abort ();
2525 /* Never force constant_p_rtx to memory. */
2526 if (GET_CODE (y) == CONSTANT_P_RTX)
2528 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2529 y = force_const_mem (mode, y);
2531 /* If X or Y are memory references, verify that their addresses are valid
2532 for the machine. */
2533 if (GET_CODE (x) == MEM
2534 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2535 && ! push_operand (x, GET_MODE (x)))
2536 || (flag_force_addr
2537 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2538 x = change_address (x, VOIDmode, XEXP (x, 0));
2540 if (GET_CODE (y) == MEM
2541 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2542 || (flag_force_addr
2543 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2544 y = change_address (y, VOIDmode, XEXP (y, 0));
2546 if (mode == BLKmode)
2547 abort ();
2549 return emit_move_insn_1 (x, y);
2552 /* Low level part of emit_move_insn.
2553 Called just like emit_move_insn, but assumes X and Y
2554 are basically valid. */
2557 emit_move_insn_1 (x, y)
2558 rtx x, y;
2560 enum machine_mode mode = GET_MODE (x);
2561 enum machine_mode submode;
2562 enum mode_class class = GET_MODE_CLASS (mode);
2563 int i;
2565 if (mode >= MAX_MACHINE_MODE)
2566 abort ();
2568 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2569 return
2570 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2572 /* Expand complex moves by moving real part and imag part, if possible. */
2573 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2574 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2575 * BITS_PER_UNIT),
2576 (class == MODE_COMPLEX_INT
2577 ? MODE_INT : MODE_FLOAT),
2579 && (mov_optab->handlers[(int) submode].insn_code
2580 != CODE_FOR_nothing))
2582 /* Don't split destination if it is a stack push. */
2583 int stack = push_operand (x, GET_MODE (x));
2585 /* If this is a stack, push the highpart first, so it
2586 will be in the argument order.
2588 In that case, change_address is used only to convert
2589 the mode, not to change the address. */
2590 if (stack)
2592 /* Note that the real part always precedes the imag part in memory
2593 regardless of machine's endianness. */
2594 #ifdef STACK_GROWS_DOWNWARD
2595 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2596 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2597 gen_imagpart (submode, y)));
2598 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2599 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2600 gen_realpart (submode, y)));
2601 #else
2602 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2603 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2604 gen_realpart (submode, y)));
2605 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2606 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2607 gen_imagpart (submode, y)));
2608 #endif
2610 else
2612 /* Show the output dies here. This is necessary for pseudos;
2613 hard regs shouldn't appear here except as return values.
2614 We never want to emit such a clobber after reload. */
2615 if (x != y
2616 && ! (reload_in_progress || reload_completed))
2618 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2621 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2622 (gen_realpart (submode, x), gen_realpart (submode, y)));
2623 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2624 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2627 return get_last_insn ();
2630 /* This will handle any multi-word mode that lacks a move_insn pattern.
2631 However, you will get better code if you define such patterns,
2632 even if they must turn into multiple assembler instructions. */
2633 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2635 rtx last_insn = 0;
2637 #ifdef PUSH_ROUNDING
2639 /* If X is a push on the stack, do the push now and replace
2640 X with a reference to the stack pointer. */
2641 if (push_operand (x, GET_MODE (x)))
2643 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2644 x = change_address (x, VOIDmode, stack_pointer_rtx);
2646 #endif
2648 /* Show the output dies here. This is necessary for pseudos;
2649 hard regs shouldn't appear here except as return values.
2650 We never want to emit such a clobber after reload. */
2651 if (x != y
2652 && ! (reload_in_progress || reload_completed))
2654 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2657 for (i = 0;
2658 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2659 i++)
2661 rtx xpart = operand_subword (x, i, 1, mode);
2662 rtx ypart = operand_subword (y, i, 1, mode);
2664 /* If we can't get a part of Y, put Y into memory if it is a
2665 constant. Otherwise, force it into a register. If we still
2666 can't get a part of Y, abort. */
2667 if (ypart == 0 && CONSTANT_P (y))
2669 y = force_const_mem (mode, y);
2670 ypart = operand_subword (y, i, 1, mode);
2672 else if (ypart == 0)
2673 ypart = operand_subword_force (y, i, mode);
2675 if (xpart == 0 || ypart == 0)
2676 abort ();
2678 last_insn = emit_move_insn (xpart, ypart);
2681 return last_insn;
2683 else
2684 abort ();
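/* Illustrative sketch: on a target with no movdc pattern, a DCmode move
   is split by the complex-mode case above into two DFmode part moves,
   roughly equivalent to

     emit_move_insn (gen_realpart (DFmode, x), gen_realpart (DFmode, y));
     emit_move_insn (gen_imagpart (DFmode, x), gen_imagpart (DFmode, y));

   while a multi-word integer mode with no move pattern falls into the
   word-by-word loop instead.  Which case applies is target-dependent.  */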
2687 /* Pushing data onto the stack. */
2689 /* Push a block of length SIZE (perhaps variable)
2690 and return an rtx to address the beginning of the block.
2691 Note that it is not possible for the value returned to be a QUEUED.
2692 The value may be virtual_outgoing_args_rtx.
2694 EXTRA is the number of bytes of padding to push in addition to SIZE.
2695 BELOW nonzero means this padding comes at low addresses;
2696 otherwise, the padding comes at high addresses. */
2699 push_block (size, extra, below)
2700 rtx size;
2701 int extra, below;
2703 register rtx temp;
2705 size = convert_modes (Pmode, ptr_mode, size, 1);
2706 if (CONSTANT_P (size))
2707 anti_adjust_stack (plus_constant (size, extra));
2708 else if (GET_CODE (size) == REG && extra == 0)
2709 anti_adjust_stack (size);
2710 else
2712 rtx temp = copy_to_mode_reg (Pmode, size);
2713 if (extra != 0)
2714 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2715 temp, 0, OPTAB_LIB_WIDEN);
2716 anti_adjust_stack (temp);
2719 #if defined (STACK_GROWS_DOWNWARD) \
2720 || (defined (ARGS_GROW_DOWNWARD) \
2721 && !defined (ACCUMULATE_OUTGOING_ARGS))
2723 /* Return the lowest stack address when STACK or ARGS grow downward and
2724 we are not accumulating outgoing arguments (the c4x port uses such
2725 conventions). */
2726 temp = virtual_outgoing_args_rtx;
2727 if (extra != 0 && below)
2728 temp = plus_constant (temp, extra);
2729 #else
2730 if (GET_CODE (size) == CONST_INT)
2731 temp = plus_constant (virtual_outgoing_args_rtx,
2732 - INTVAL (size) - (below ? 0 : extra));
2733 else if (extra != 0 && !below)
2734 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2735 negate_rtx (Pmode, plus_constant (size, extra)));
2736 else
2737 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2738 negate_rtx (Pmode, size));
2739 #endif
2741 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2745 gen_push_operand ()
2747 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2750 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2751 block of SIZE bytes. */
2753 static rtx
2754 get_push_address (size)
2755 int size;
2757 register rtx temp;
2759 if (STACK_PUSH_CODE == POST_DEC)
2760 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2761 else if (STACK_PUSH_CODE == POST_INC)
2762 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2763 else
2764 temp = stack_pointer_rtx;
2766 return copy_to_reg (temp);
2769 /* Generate code to push X onto the stack, assuming it has mode MODE and
2770 type TYPE.
2771 MODE is redundant except when X is a CONST_INT (since they don't
2772 carry mode info).
2773 SIZE is an rtx for the size of data to be copied (in bytes),
2774 needed only if X is BLKmode.
2776 ALIGN (in bytes) is maximum alignment we can assume.
2778 If PARTIAL and REG are both nonzero, then copy that many of the first
2779 words of X into registers starting with REG, and push the rest of X.
2780 The amount of space pushed is decreased by PARTIAL words,
2781 rounded *down* to a multiple of PARM_BOUNDARY.
2782 REG must be a hard register in this case.
2783 If REG is zero but PARTIAL is not, take all other actions for an
2784 argument partially in registers, but do not actually load any
2785 registers.
2787 EXTRA is the amount in bytes of extra space to leave next to this arg.
2788 This is ignored if an argument block has already been allocated.
2790 On a machine that lacks real push insns, ARGS_ADDR is the address of
2791 the bottom of the argument block for this call. We use indexing off there
2792 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2793 argument block has not been preallocated.
2795 ARGS_SO_FAR is the size of args previously pushed for this call.
2797 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2798 for arguments passed in registers. If nonzero, it will be the number
2799 of bytes required. */
2801 void
2802 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2803 args_addr, args_so_far, reg_parm_stack_space)
2804 register rtx x;
2805 enum machine_mode mode;
2806 tree type;
2807 rtx size;
2808 int align;
2809 int partial;
2810 rtx reg;
2811 int extra;
2812 rtx args_addr;
2813 rtx args_so_far;
2814 int reg_parm_stack_space;
2816 rtx xinner;
2817 enum direction stack_direction
2818 #ifdef STACK_GROWS_DOWNWARD
2819 = downward;
2820 #else
2821 = upward;
2822 #endif
2824 /* Decide where to pad the argument: `downward' for below,
2825 `upward' for above, or `none' for don't pad it.
2826 Default is below for small data on big-endian machines; else above. */
2827 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2829 /* Invert direction if stack is post-update. */
2830 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2831 if (where_pad != none)
2832 where_pad = (where_pad == downward ? upward : downward);
2834 xinner = x = protect_from_queue (x, 0);
2836 if (mode == BLKmode)
2838 /* Copy a block into the stack, entirely or partially. */
2840 register rtx temp;
2841 int used = partial * UNITS_PER_WORD;
2842 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2843 int skip;
2845 if (size == 0)
2846 abort ();
2848 used -= offset;
2850 /* USED is now the # of bytes we need not copy to the stack
2851 because registers will take care of them. */
2853 if (partial != 0)
2854 xinner = change_address (xinner, BLKmode,
2855 plus_constant (XEXP (xinner, 0), used));
2857 /* If the partial register-part of the arg counts in its stack size,
2858 skip the part of stack space corresponding to the registers.
2859 Otherwise, start copying to the beginning of the stack space,
2860 by setting SKIP to 0. */
2861 skip = (reg_parm_stack_space == 0) ? 0 : used;
2863 #ifdef PUSH_ROUNDING
2864 /* Do it with several push insns if that doesn't take lots of insns
2865 and if there is no difficulty with push insns that skip bytes
2866 on the stack for alignment purposes. */
2867 if (args_addr == 0
2868 && GET_CODE (size) == CONST_INT
2869 && skip == 0
2870 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2871 /* Here we avoid the case of a structure whose weak alignment
2872 forces many pushes of a small amount of data,
2873 and such small pushes do rounding that causes trouble. */
2874 && ((! SLOW_UNALIGNED_ACCESS)
2875 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2876 || PUSH_ROUNDING (align) == align)
2877 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2879 /* Push padding now if padding above and stack grows down,
2880 or if padding below and stack grows up.
2881 But if space already allocated, this has already been done. */
2882 if (extra && args_addr == 0
2883 && where_pad != none && where_pad != stack_direction)
2884 anti_adjust_stack (GEN_INT (extra));
2886 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2887 INTVAL (size) - used, align);
2889 if (current_function_check_memory_usage && ! in_check_memory_usage)
2891 rtx temp;
2893 in_check_memory_usage = 1;
2894 temp = get_push_address (INTVAL(size) - used);
2895 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2896 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2897 temp, Pmode,
2898 XEXP (xinner, 0), Pmode,
2899 GEN_INT (INTVAL(size) - used),
2900 TYPE_MODE (sizetype));
2901 else
2902 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2903 temp, Pmode,
2904 GEN_INT (INTVAL(size) - used),
2905 TYPE_MODE (sizetype),
2906 GEN_INT (MEMORY_USE_RW),
2907 TYPE_MODE (integer_type_node));
2908 in_check_memory_usage = 0;
2911 else
2912 #endif /* PUSH_ROUNDING */
2914 /* Otherwise make space on the stack and copy the data
2915 to the address of that space. */
2917 /* Deduct words put into registers from the size we must copy. */
2918 if (partial != 0)
2920 if (GET_CODE (size) == CONST_INT)
2921 size = GEN_INT (INTVAL (size) - used);
2922 else
2923 size = expand_binop (GET_MODE (size), sub_optab, size,
2924 GEN_INT (used), NULL_RTX, 0,
2925 OPTAB_LIB_WIDEN);
2928 /* Get the address of the stack space.
2929 In this case, we do not deal with EXTRA separately.
2930 A single stack adjust will do. */
2931 if (! args_addr)
2933 temp = push_block (size, extra, where_pad == downward);
2934 extra = 0;
2936 else if (GET_CODE (args_so_far) == CONST_INT)
2937 temp = memory_address (BLKmode,
2938 plus_constant (args_addr,
2939 skip + INTVAL (args_so_far)));
2940 else
2941 temp = memory_address (BLKmode,
2942 plus_constant (gen_rtx_PLUS (Pmode,
2943 args_addr,
2944 args_so_far),
2945 skip));
2946 if (current_function_check_memory_usage && ! in_check_memory_usage)
2948 rtx target;
2950 in_check_memory_usage = 1;
2951 target = copy_to_reg (temp);
2952 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2953 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2954 target, Pmode,
2955 XEXP (xinner, 0), Pmode,
2956 size, TYPE_MODE (sizetype));
2957 else
2958 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2959 target, Pmode,
2960 size, TYPE_MODE (sizetype),
2961 GEN_INT (MEMORY_USE_RW),
2962 TYPE_MODE (integer_type_node));
2963 in_check_memory_usage = 0;
2966 /* TEMP is the address of the block. Copy the data there. */
2967 if (GET_CODE (size) == CONST_INT
2968 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
2970 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2971 INTVAL (size), align);
2972 goto ret;
2974 else
2976 rtx opalign = GEN_INT (align);
2977 enum machine_mode mode;
2978 rtx target = gen_rtx_MEM (BLKmode, temp);
2980 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2981 mode != VOIDmode;
2982 mode = GET_MODE_WIDER_MODE (mode))
2984 enum insn_code code = movstr_optab[(int) mode];
2985 insn_operand_predicate_fn pred;
2987 if (code != CODE_FOR_nothing
2988 && ((GET_CODE (size) == CONST_INT
2989 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2990 <= (GET_MODE_MASK (mode) >> 1)))
2991 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2992 && (!(pred = insn_data[(int) code].operand[0].predicate)
2993 || ((*pred) (target, BLKmode)))
2994 && (!(pred = insn_data[(int) code].operand[1].predicate)
2995 || ((*pred) (xinner, BLKmode)))
2996 && (!(pred = insn_data[(int) code].operand[3].predicate)
2997 || ((*pred) (opalign, VOIDmode))))
2999 rtx op2 = convert_to_mode (mode, size, 1);
3000 rtx last = get_last_insn ();
3001 rtx pat;
3003 pred = insn_data[(int) code].operand[2].predicate;
3004 if (pred != 0 && ! (*pred) (op2, mode))
3005 op2 = copy_to_mode_reg (mode, op2);
3007 pat = GEN_FCN ((int) code) (target, xinner,
3008 op2, opalign);
3009 if (pat)
3011 emit_insn (pat);
3012 goto ret;
3014 else
3015 delete_insns_since (last);
3020 #ifndef ACCUMULATE_OUTGOING_ARGS
3021 /* If the source is referenced relative to the stack pointer,
3022 copy it to another register to stabilize it. We do not need
3023 to do this if we know that we won't be changing sp. */
3025 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3026 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3027 temp = copy_to_reg (temp);
3028 #endif
3030 /* Make inhibit_defer_pop nonzero around the library call
3031 to force it to pop the bcopy-arguments right away. */
3032 NO_DEFER_POP;
3033 #ifdef TARGET_MEM_FUNCTIONS
3034 emit_library_call (memcpy_libfunc, 0,
3035 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3036 convert_to_mode (TYPE_MODE (sizetype),
3037 size, TREE_UNSIGNED (sizetype)),
3038 TYPE_MODE (sizetype));
3039 #else
3040 emit_library_call (bcopy_libfunc, 0,
3041 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3042 convert_to_mode (TYPE_MODE (integer_type_node),
3043 size,
3044 TREE_UNSIGNED (integer_type_node)),
3045 TYPE_MODE (integer_type_node));
3046 #endif
3047 OK_DEFER_POP;
3050 else if (partial > 0)
3052 /* Scalar partly in registers. */
3054 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3055 int i;
3056 int not_stack;
3057 /* # words of start of argument
3058 that we must make space for but need not store. */
3059 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3060 int args_offset = INTVAL (args_so_far);
3061 int skip;
3063 /* Push padding now if padding above and stack grows down,
3064 or if padding below and stack grows up.
3065 But if space already allocated, this has already been done. */
3066 if (extra && args_addr == 0
3067 && where_pad != none && where_pad != stack_direction)
3068 anti_adjust_stack (GEN_INT (extra));
3070 /* If we make space by pushing it, we might as well push
3071 the real data. Otherwise, we can leave OFFSET nonzero
3072 and leave the space uninitialized. */
3073 if (args_addr == 0)
3074 offset = 0;
3076 /* Now NOT_STACK gets the number of words that we don't need to
3077 allocate on the stack. */
3078 not_stack = partial - offset;
3080 /* If the partial register-part of the arg counts in its stack size,
3081 skip the part of stack space corresponding to the registers.
3082 Otherwise, start copying to the beginning of the stack space,
3083 by setting SKIP to 0. */
3084 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3086 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3087 x = validize_mem (force_const_mem (mode, x));
3089 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3090 SUBREGs of such registers are not allowed. */
3091 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3092 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3093 x = copy_to_reg (x);
3095 /* Loop over all the words allocated on the stack for this arg. */
3096 /* We can do it by words, because any scalar bigger than a word
3097 has a size a multiple of a word. */
3098 #ifndef PUSH_ARGS_REVERSED
3099 for (i = not_stack; i < size; i++)
3100 #else
3101 for (i = size - 1; i >= not_stack; i--)
3102 #endif
3103 if (i >= not_stack + offset)
3104 emit_push_insn (operand_subword_force (x, i, mode),
3105 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3106 0, args_addr,
3107 GEN_INT (args_offset + ((i - not_stack + skip)
3108 * UNITS_PER_WORD)),
3109 reg_parm_stack_space);
3111 else
3113 rtx addr;
3114 rtx target = NULL_RTX;
3116 /* Push padding now if padding above and stack grows down,
3117 or if padding below and stack grows up.
3118 But if space already allocated, this has already been done. */
3119 if (extra && args_addr == 0
3120 && where_pad != none && where_pad != stack_direction)
3121 anti_adjust_stack (GEN_INT (extra));
3123 #ifdef PUSH_ROUNDING
3124 if (args_addr == 0)
3125 addr = gen_push_operand ();
3126 else
3127 #endif
3129 if (GET_CODE (args_so_far) == CONST_INT)
3130 addr
3131 = memory_address (mode,
3132 plus_constant (args_addr,
3133 INTVAL (args_so_far)));
3134 else
3135 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3136 args_so_far));
3137 target = addr;
3140 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3142 if (current_function_check_memory_usage && ! in_check_memory_usage)
3144 in_check_memory_usage = 1;
3145 if (target == 0)
3146 target = get_push_address (GET_MODE_SIZE (mode));
3148 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3149 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3150 target, Pmode,
3151 XEXP (x, 0), Pmode,
3152 GEN_INT (GET_MODE_SIZE (mode)),
3153 TYPE_MODE (sizetype));
3154 else
3155 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3156 target, Pmode,
3157 GEN_INT (GET_MODE_SIZE (mode)),
3158 TYPE_MODE (sizetype),
3159 GEN_INT (MEMORY_USE_RW),
3160 TYPE_MODE (integer_type_node));
3161 in_check_memory_usage = 0;
3165 ret:
3166 /* If part should go in registers, copy that part
3167 into the appropriate registers. Do this now, at the end,
3168 since mem-to-mem copies above may do function calls. */
3169 if (partial > 0 && reg != 0)
3171 /* Handle calls that pass values in multiple non-contiguous locations.
3172 The Irix 6 ABI has examples of this. */
3173 if (GET_CODE (reg) == PARALLEL)
3174 emit_group_load (reg, x, -1, align); /* ??? size? */
3175 else
3176 move_block_to_reg (REGNO (reg), x, partial, mode);
3179 if (extra && args_addr == 0 && where_pad == stack_direction)
3180 anti_adjust_stack (GEN_INT (extra));
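/* Illustrative sketch (simplified from what calls.c really does): on a
   target with push instructions, a word-sized scalar argument VAL pushed
   entirely on the stack, with no partial-register part and no
   preallocated argument block, might be emitted as

     emit_push_insn (val, SImode, NULL_TREE, NULL_RTX,
                     PARM_BOUNDARY / BITS_PER_UNIT,
                     0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0);

   Real callers derive MODE, ALIGN, PARTIAL and REG from FUNCTION_ARG and
   related target macros rather than hard-coding them.  */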
3183 /* Expand an assignment that stores the value of FROM into TO.
3184 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3185 (This may contain a QUEUED rtx;
3186 if the value is constant, this rtx is a constant.)
3187 Otherwise, the returned value is NULL_RTX.
3189 SUGGEST_REG is no longer actually used.
3190 It used to mean, copy the value through a register
3191 and return that register, if that is possible.
3192 We now use WANT_VALUE to decide whether to do this. */
3195 expand_assignment (to, from, want_value, suggest_reg)
3196 tree to, from;
3197 int want_value;
3198 int suggest_reg ATTRIBUTE_UNUSED;
3200 register rtx to_rtx = 0;
3201 rtx result;
3203 /* Don't crash if the lhs of the assignment was erroneous. */
3205 if (TREE_CODE (to) == ERROR_MARK)
3207 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3208 return want_value ? result : NULL_RTX;
3211 /* Assignment of a structure component needs special treatment
3212 if the structure component's rtx is not simply a MEM.
3213 Assignment of an array element at a constant index, and assignment of
3214 an array element in an unaligned packed structure field, have the same
3215 problem. */
3217 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3218 || TREE_CODE (to) == ARRAY_REF)
3220 enum machine_mode mode1;
3221 int bitsize;
3222 int bitpos;
3223 tree offset;
3224 int unsignedp;
3225 int volatilep = 0;
3226 tree tem;
3227 int alignment;
3229 push_temp_slots ();
3230 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3231 &unsignedp, &volatilep, &alignment);
3233 /* If we are going to use store_bit_field and extract_bit_field,
3234 make sure to_rtx will be safe for multiple use. */
3236 if (mode1 == VOIDmode && want_value)
3237 tem = stabilize_reference (tem);
3239 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3240 if (offset != 0)
3242 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3244 if (GET_CODE (to_rtx) != MEM)
3245 abort ();
3247 if (GET_MODE (offset_rtx) != ptr_mode)
3249 #ifdef POINTERS_EXTEND_UNSIGNED
3250 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3251 #else
3252 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3253 #endif
3256 /* A constant address in TO_RTX can have VOIDmode; we must not try
3257 to call force_reg for that case, so avoid it here. */
3258 if (GET_CODE (to_rtx) == MEM
3259 && GET_MODE (to_rtx) == BLKmode
3260 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3261 && bitsize
3262 && (bitpos % bitsize) == 0
3263 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3264 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3266 rtx temp = change_address (to_rtx, mode1,
3267 plus_constant (XEXP (to_rtx, 0),
3268 (bitpos /
3269 BITS_PER_UNIT)));
3270 if (GET_CODE (XEXP (temp, 0)) == REG)
3271 to_rtx = temp;
3272 else
3273 to_rtx = change_address (to_rtx, mode1,
3274 force_reg (GET_MODE (XEXP (temp, 0)),
3275 XEXP (temp, 0)));
3276 bitpos = 0;
3279 to_rtx = change_address (to_rtx, VOIDmode,
3280 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3281 force_reg (ptr_mode,
3282 offset_rtx)));
3285 if (volatilep)
3287 if (GET_CODE (to_rtx) == MEM)
3289 /* When the offset is zero, to_rtx is the address of the
3290 structure we are storing into, and hence may be shared.
3291 We must make a new MEM before setting the volatile bit. */
3292 if (offset == 0)
3293 to_rtx = copy_rtx (to_rtx);
3295 MEM_VOLATILE_P (to_rtx) = 1;
3297 #if 0 /* This was turned off because, when a field is volatile
3298 in an object which is not volatile, the object may be in a register,
3299 and then we would abort over here. */
3300 else
3301 abort ();
3302 #endif
3305 if (TREE_CODE (to) == COMPONENT_REF
3306 && TREE_READONLY (TREE_OPERAND (to, 1)))
3308 if (offset == 0)
3309 to_rtx = copy_rtx (to_rtx);
3311 RTX_UNCHANGING_P (to_rtx) = 1;
3314 /* Check the access. */
3315 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3317 rtx to_addr;
3318 int size;
3319 int best_mode_size;
3320 enum machine_mode best_mode;
3322 best_mode = get_best_mode (bitsize, bitpos,
3323 TYPE_ALIGN (TREE_TYPE (tem)),
3324 mode1, volatilep);
3325 if (best_mode == VOIDmode)
3326 best_mode = QImode;
3328 best_mode_size = GET_MODE_BITSIZE (best_mode);
3329 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3330 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3331 size *= GET_MODE_SIZE (best_mode);
3333 /* Check the access right of the pointer. */
3334 if (size)
3335 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3336 to_addr, Pmode,
3337 GEN_INT (size), TYPE_MODE (sizetype),
3338 GEN_INT (MEMORY_USE_WO),
3339 TYPE_MODE (integer_type_node));
3342 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3343 (want_value
3344 /* Spurious cast makes HPUX compiler happy. */
3345 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3346 : VOIDmode),
3347 unsignedp,
3348 /* Required alignment of containing datum. */
3349 alignment,
3350 int_size_in_bytes (TREE_TYPE (tem)),
3351 get_alias_set (to));
3352 preserve_temp_slots (result);
3353 free_temp_slots ();
3354 pop_temp_slots ();
3356 /* If the value is meaningful, convert RESULT to the proper mode.
3357 Otherwise, return nothing. */
3358 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3359 TYPE_MODE (TREE_TYPE (from)),
3360 result,
3361 TREE_UNSIGNED (TREE_TYPE (to)))
3362 : NULL_RTX);
3365 /* If the rhs is a function call and its value is not an aggregate,
3366 call the function before we start to compute the lhs.
3367 This is needed for correct code for cases such as
3368 val = setjmp (buf) on machines where reference to val
3369 requires loading up part of an address in a separate insn.
3371 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3372 a promoted variable where the zero- or sign- extension needs to be done.
3373 Handling this in the normal way is safe because no computation is done
3374 before the call. */
3375 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3376 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3377 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3379 rtx value;
3381 push_temp_slots ();
3382 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3383 if (to_rtx == 0)
3384 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3386 /* Handle calls that return values in multiple non-contiguous locations.
3387 The Irix 6 ABI has examples of this. */
3388 if (GET_CODE (to_rtx) == PARALLEL)
3389 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3390 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3391 else if (GET_MODE (to_rtx) == BLKmode)
3392 emit_block_move (to_rtx, value, expr_size (from),
3393 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3394 else
3396 #ifdef POINTERS_EXTEND_UNSIGNED
3397 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3398 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3399 value = convert_memory_address (GET_MODE (to_rtx), value);
3400 #endif
3401 emit_move_insn (to_rtx, value);
3403 preserve_temp_slots (to_rtx);
3404 free_temp_slots ();
3405 pop_temp_slots ();
3406 return want_value ? to_rtx : NULL_RTX;
3409 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3410 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3412 if (to_rtx == 0)
3414 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3415 if (GET_CODE (to_rtx) == MEM)
3416 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3419 /* Don't move directly into a return register. */
3420 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3422 rtx temp;
3424 push_temp_slots ();
3425 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3426 emit_move_insn (to_rtx, temp);
3427 preserve_temp_slots (to_rtx);
3428 free_temp_slots ();
3429 pop_temp_slots ();
3430 return want_value ? to_rtx : NULL_RTX;
3433 /* In case we are returning the contents of an object which overlaps
3434 the place the value is being stored, use a safe function when copying
3435 a value through a pointer into a structure value return block. */
3436 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3437 && current_function_returns_struct
3438 && !current_function_returns_pcc_struct)
3440 rtx from_rtx, size;
3442 push_temp_slots ();
3443 size = expr_size (from);
3444 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3445 EXPAND_MEMORY_USE_DONT);
3447 /* Copy the rights of the bitmap. */
3448 if (current_function_check_memory_usage)
3449 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3450 XEXP (to_rtx, 0), Pmode,
3451 XEXP (from_rtx, 0), Pmode,
3452 convert_to_mode (TYPE_MODE (sizetype),
3453 size, TREE_UNSIGNED (sizetype)),
3454 TYPE_MODE (sizetype));
3456 #ifdef TARGET_MEM_FUNCTIONS
3457 emit_library_call (memcpy_libfunc, 0,
3458 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3459 XEXP (from_rtx, 0), Pmode,
3460 convert_to_mode (TYPE_MODE (sizetype),
3461 size, TREE_UNSIGNED (sizetype)),
3462 TYPE_MODE (sizetype));
3463 #else
3464 emit_library_call (bcopy_libfunc, 0,
3465 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3466 XEXP (to_rtx, 0), Pmode,
3467 convert_to_mode (TYPE_MODE (integer_type_node),
3468 size, TREE_UNSIGNED (integer_type_node)),
3469 TYPE_MODE (integer_type_node));
3470 #endif
3472 preserve_temp_slots (to_rtx);
3473 free_temp_slots ();
3474 pop_temp_slots ();
3475 return want_value ? to_rtx : NULL_RTX;
3478 /* Compute FROM and store the value in the rtx we got. */
3480 push_temp_slots ();
3481 result = store_expr (from, to_rtx, want_value);
3482 preserve_temp_slots (result);
3483 free_temp_slots ();
3484 pop_temp_slots ();
3485 return want_value ? result : NULL_RTX;
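/* Illustrative sketch: a front end expanding the statement "a = b + 1"
   and not needing the value of the assignment calls

     expand_assignment (lhs_tree, rhs_tree, 0, 0);

   with WANT_VALUE zero, so no rtx for the stored value is produced; this
   is how expand_expr handles a MODIFY_EXPR whose value is ignored.  */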
3488 /* Generate code for computing expression EXP,
3489 and storing the value into TARGET.
3490 TARGET may contain a QUEUED rtx.
3492 If WANT_VALUE is nonzero, return a copy of the value
3493 not in TARGET, so that we can be sure to use the proper
3494 value in a containing expression even if TARGET has something
3495 else stored in it. If possible, we copy the value through a pseudo
3496 and return that pseudo. Or, if the value is constant, we try to
3497 return the constant. In some cases, we return a pseudo
3498 copied *from* TARGET.
3500 If the mode is BLKmode then we may return TARGET itself.
3501 It turns out that in BLKmode it doesn't cause a problem,
3502 because C has no operators that could combine two different
3503 assignments into the same BLKmode object with different values
3504 with no sequence point. Will other languages need this to
3505 be more thorough?
3507 If WANT_VALUE is 0, we return NULL, to make sure
3508 to catch quickly any cases where the caller uses the value
3509 and fails to set WANT_VALUE. */
3512 store_expr (exp, target, want_value)
3513 register tree exp;
3514 register rtx target;
3515 int want_value;
3517 register rtx temp;
3518 int dont_return_target = 0;
3520 if (TREE_CODE (exp) == COMPOUND_EXPR)
3522 /* Perform first part of compound expression, then assign from second
3523 part. */
3524 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3525 emit_queue ();
3526 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3528 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3530 /* For conditional expression, get safe form of the target. Then
3531 test the condition, doing the appropriate assignment on either
3532 side. This avoids the creation of unnecessary temporaries.
3533 For non-BLKmode, it is more efficient not to do this. */
3535 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3537 emit_queue ();
3538 target = protect_from_queue (target, 1);
3540 do_pending_stack_adjust ();
3541 NO_DEFER_POP;
3542 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3543 start_cleanup_deferral ();
3544 store_expr (TREE_OPERAND (exp, 1), target, 0);
3545 end_cleanup_deferral ();
3546 emit_queue ();
3547 emit_jump_insn (gen_jump (lab2));
3548 emit_barrier ();
3549 emit_label (lab1);
3550 start_cleanup_deferral ();
3551 store_expr (TREE_OPERAND (exp, 2), target, 0);
3552 end_cleanup_deferral ();
3553 emit_queue ();
3554 emit_label (lab2);
3555 OK_DEFER_POP;
3557 return want_value ? target : NULL_RTX;
3559 else if (queued_subexp_p (target))
3560 /* If target contains a postincrement, let's not risk
3561 using it as the place to generate the rhs. */
3563 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3565 /* Expand EXP into a new pseudo. */
3566 temp = gen_reg_rtx (GET_MODE (target));
3567 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3569 else
3570 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3572 /* If target is volatile, ANSI requires accessing the value
3573 *from* the target, if it is accessed. So make that happen.
3574 In no case return the target itself. */
3575 if (! MEM_VOLATILE_P (target) && want_value)
3576 dont_return_target = 1;
3578 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3579 && GET_MODE (target) != BLKmode)
3580 /* If target is in memory and caller wants value in a register instead,
3581 arrange that. Pass TARGET as target for expand_expr so that,
3582 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3583 We know expand_expr will not use the target in that case.
3584 Don't do this if TARGET is volatile because we are supposed
3585 to write it and then read it. */
3587 temp = expand_expr (exp, target, GET_MODE (target), 0);
3588 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3589 temp = copy_to_reg (temp);
3590 dont_return_target = 1;
3592 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3593 /* If this is a scalar in a register that is stored in a wider mode
3594 than the declared mode, compute the result into its declared mode
3595 and then convert to the wider mode. Our value is the computed
3596 expression. */
3598 /* If we don't want a value, we can do the conversion inside EXP,
3599 which will often result in some optimizations. Do the conversion
3600 in two steps: first change the signedness, if needed, then
3601 the extend. But don't do this if the type of EXP is a subtype
3602 of something else since then the conversion might involve
3603 more than just converting modes. */
3604 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3605 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3607 if (TREE_UNSIGNED (TREE_TYPE (exp))
3608 != SUBREG_PROMOTED_UNSIGNED_P (target))
3610 = convert
3611 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3612 TREE_TYPE (exp)),
3613 exp);
3615 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3616 SUBREG_PROMOTED_UNSIGNED_P (target)),
3617 exp);
3620 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3622 /* If TEMP is a volatile MEM and we want a result value, make
3623 the access now so it gets done only once. Likewise if
3624 it contains TARGET. */
3625 if (GET_CODE (temp) == MEM && want_value
3626 && (MEM_VOLATILE_P (temp)
3627 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3628 temp = copy_to_reg (temp);
3630 /* If TEMP is a VOIDmode constant, use convert_modes to make
3631 sure that we properly convert it. */
3632 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3633 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3634 TYPE_MODE (TREE_TYPE (exp)), temp,
3635 SUBREG_PROMOTED_UNSIGNED_P (target));
3637 convert_move (SUBREG_REG (target), temp,
3638 SUBREG_PROMOTED_UNSIGNED_P (target));
3640 /* If we promoted a constant, change the mode back down to match
3641 target. Otherwise, the caller might get confused by a result whose
3642 mode is larger than expected. */
3644 if (want_value && GET_MODE (temp) != GET_MODE (target)
3645 && GET_MODE (temp) != VOIDmode)
3647 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3648 SUBREG_PROMOTED_VAR_P (temp) = 1;
3649 SUBREG_PROMOTED_UNSIGNED_P (temp)
3650 = SUBREG_PROMOTED_UNSIGNED_P (target);
3653 return want_value ? temp : NULL_RTX;
3655 else
3657 temp = expand_expr (exp, target, GET_MODE (target), 0);
3658 /* Return TARGET if it's a specified hardware register.
3659 If TARGET is a volatile mem ref, either return TARGET
3660 or return a reg copied *from* TARGET; ANSI requires this.
3662 Otherwise, if TEMP is not TARGET, return TEMP
3663 if it is constant (for efficiency),
3664 or if we really want the correct value. */
3665 if (!(target && GET_CODE (target) == REG
3666 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3667 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3668 && ! rtx_equal_p (temp, target)
3669 && (CONSTANT_P (temp) || want_value))
3670 dont_return_target = 1;
3673 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3674 the same as that of TARGET, adjust the constant. This is needed, for
3675 example, in case it is a CONST_DOUBLE and we want only a word-sized
3676 value. */
3677 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3678 && TREE_CODE (exp) != ERROR_MARK
3679 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3680 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3681 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3683 if (current_function_check_memory_usage
3684 && GET_CODE (target) == MEM
3685 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3687 if (GET_CODE (temp) == MEM)
3688 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3689 XEXP (target, 0), Pmode,
3690 XEXP (temp, 0), Pmode,
3691 expr_size (exp), TYPE_MODE (sizetype));
3692 else
3693 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3694 XEXP (target, 0), Pmode,
3695 expr_size (exp), TYPE_MODE (sizetype),
3696 GEN_INT (MEMORY_USE_WO),
3697 TYPE_MODE (integer_type_node));
3700 /* If value was not generated in the target, store it there.
3701 Convert the value to TARGET's type first if necessary. */
3702 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3703 one or both of them are volatile memory refs, we have to distinguish
3704 two cases:
3705 - expand_expr has used TARGET. In this case, we must not generate
3706 another copy. This can be detected by TARGET being equal according
3707 to == .
3708 - expand_expr has not used TARGET - that means that the source just
3709 happens to have the same RTX form. Since temp will have been created
3710 by expand_expr, it will compare unequal according to == .
3711 We must generate a copy in this case, to reach the correct number
3712 of volatile memory references. */
3714 if ((! rtx_equal_p (temp, target)
3715 || (temp != target && (side_effects_p (temp)
3716 || side_effects_p (target))))
3717 && TREE_CODE (exp) != ERROR_MARK)
3719 target = protect_from_queue (target, 1);
3720 if (GET_MODE (temp) != GET_MODE (target)
3721 && GET_MODE (temp) != VOIDmode)
3723 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3724 if (dont_return_target)
3726 /* In this case, we will return TEMP,
3727 so make sure it has the proper mode.
3728 But don't forget to store the value into TARGET. */
3729 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3730 emit_move_insn (target, temp);
3732 else
3733 convert_move (target, temp, unsignedp);
3736 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3738 /* Handle copying a string constant into an array.
3739 The string constant may be shorter than the array.
3740 So copy just the string's actual length, and clear the rest. */
3741 rtx size;
3742 rtx addr;
3744 /* Get the size of the data type of the string,
3745 which is actually the size of the target. */
3746 size = expr_size (exp);
3747 if (GET_CODE (size) == CONST_INT
3748 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3749 emit_block_move (target, temp, size,
3750 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3751 else
3753 /* Compute the size of the data to copy from the string. */
3754 tree copy_size
3755 = size_binop (MIN_EXPR,
3756 make_tree (sizetype, size),
3757 convert (sizetype,
3758 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3759 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3760 VOIDmode, 0);
3761 rtx label = 0;
3763 /* Copy that much. */
3764 emit_block_move (target, temp, copy_size_rtx,
3765 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3767 /* Figure out how much is left in TARGET that we have to clear.
3768 Do all calculations in ptr_mode. */
3770 addr = XEXP (target, 0);
3771 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3773 if (GET_CODE (copy_size_rtx) == CONST_INT)
3775 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3776 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3778 else
3780 addr = force_reg (ptr_mode, addr);
3781 addr = expand_binop (ptr_mode, add_optab, addr,
3782 copy_size_rtx, NULL_RTX, 0,
3783 OPTAB_LIB_WIDEN);
3785 size = expand_binop (ptr_mode, sub_optab, size,
3786 copy_size_rtx, NULL_RTX, 0,
3787 OPTAB_LIB_WIDEN);
3789 label = gen_label_rtx ();
3790 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3791 GET_MODE (size), 0, 0, label);
3794 if (size != const0_rtx)
3796 /* Be sure we can write on ADDR. */
3797 if (current_function_check_memory_usage)
3798 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3799 addr, Pmode,
3800 size, TYPE_MODE (sizetype),
3801 GEN_INT (MEMORY_USE_WO),
3802 TYPE_MODE (integer_type_node));
3803 #ifdef TARGET_MEM_FUNCTIONS
3804 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3805 addr, ptr_mode,
3806 const0_rtx, TYPE_MODE (integer_type_node),
3807 convert_to_mode (TYPE_MODE (sizetype),
3808 size,
3809 TREE_UNSIGNED (sizetype)),
3810 TYPE_MODE (sizetype));
3811 #else
3812 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3813 addr, ptr_mode,
3814 convert_to_mode (TYPE_MODE (integer_type_node),
3815 size,
3816 TREE_UNSIGNED (integer_type_node)),
3817 TYPE_MODE (integer_type_node));
3818 #endif
3821 if (label)
3822 emit_label (label);
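/* Illustrative sketch (hypothetical names): a source-level initialization
   that would typically reach the STRING_CST path above.  The literal
   supplies TREE_STRING_LENGTH == 3 bytes (including the terminating NUL),
   so 3 bytes are block-copied and the remaining 13 bytes of BUF are
   cleared by the memset/bzero call emitted above.

       void
       init_buf (void)
       {
         char buf[16] = "hi";
       }
*/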
3825 /* Handle calls that return values in multiple non-contiguous locations.
3826 The Irix 6 ABI has examples of this. */
3827 else if (GET_CODE (target) == PARALLEL)
3828 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3829 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3830 else if (GET_MODE (temp) == BLKmode)
3831 emit_block_move (target, temp, expr_size (exp),
3832 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3833 else
3834 emit_move_insn (target, temp);
3837 /* If we don't want a value, return NULL_RTX. */
3838 if (! want_value)
3839 return NULL_RTX;
3841 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3842 ??? The latter test doesn't seem to make sense. */
3843 else if (dont_return_target && GET_CODE (temp) != MEM)
3844 return temp;
3846 /* Return TARGET itself if it is a hard register. */
3847 else if (want_value && GET_MODE (target) != BLKmode
3848 && ! (GET_CODE (target) == REG
3849 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3850 return copy_to_reg (target);
3852 else
3853 return target;
3856 /* Return 1 if EXP just contains zeros. */
3858 static int
3859 is_zeros_p (exp)
3860 tree exp;
3862 tree elt;
3864 switch (TREE_CODE (exp))
3866 case CONVERT_EXPR:
3867 case NOP_EXPR:
3868 case NON_LVALUE_EXPR:
3869 return is_zeros_p (TREE_OPERAND (exp, 0));
3871 case INTEGER_CST:
3872 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3874 case COMPLEX_CST:
3875 return
3876 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3878 case REAL_CST:
3879 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3881 case CONSTRUCTOR:
3882 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3883 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3884 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3885 if (! is_zeros_p (TREE_VALUE (elt)))
3886 return 0;
3888 return 1;
3890 default:
3891 return 0;
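/* Illustrative sketch (hypothetical names): a constructor for which
   is_zeros_p returns 1.  Every element is a zero constant, so once the
   object has been cleared no element stores need to be emitted for it.

       struct vec { int x, y; double w; };

       void
       reset (void)
       {
         struct vec v = { 0, 0, 0.0 };
       }
*/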
3895 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3897 static int
3898 mostly_zeros_p (exp)
3899 tree exp;
3901 if (TREE_CODE (exp) == CONSTRUCTOR)
3903 int elts = 0, zeros = 0;
3904 tree elt = CONSTRUCTOR_ELTS (exp);
3905 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3907 /* If there are no ranges of true bits, it is all zero. */
3908 return elt == NULL_TREE;
3910 for (; elt; elt = TREE_CHAIN (elt))
3912 /* We do not handle the case where the index is a RANGE_EXPR,
3913 so the statistic will be somewhat inaccurate.
3914 We do make a more accurate count in store_constructor itself,
3915 so since this function is only used for nested array elements,
3916 this should be close enough. */
3917 if (mostly_zeros_p (TREE_VALUE (elt)))
3918 zeros++;
3919 elts++;
3922 return 4 * zeros >= 3 * elts;
3925 return is_zeros_p (exp);
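/* Illustrative sketch (hypothetical names): an initializer that satisfies
   the 3/4 test above.  Seven of the eight elements are zero, so
   4 * 7 >= 3 * 8 and mostly_zeros_p returns 1; store_constructor then
   prefers to clear the whole object first and store only the nonzero
   element.

       void
       fill (void)
       {
         int tab[8] = { 0, 0, 0, 0, 0, 0, 7, 0 };
       }
*/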
3928 /* Helper function for store_constructor.
3929 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3930 TYPE is the type of the CONSTRUCTOR, not the element type.
3931 ALIGN and CLEARED are as for store_constructor.
3933 This provides a recursive shortcut back to store_constructor when it isn't
3934 necessary to go through store_field. This is so that we can pass through
3935 the cleared field to let store_constructor know that we may not have to
3936 clear a substructure if the outer structure has already been cleared. */
3938 static void
3939 store_constructor_field (target, bitsize, bitpos,
3940 mode, exp, type, align, cleared)
3941 rtx target;
3942 int bitsize, bitpos;
3943 enum machine_mode mode;
3944 tree exp, type;
3945 int align;
3946 int cleared;
3948 if (TREE_CODE (exp) == CONSTRUCTOR
3949 && bitpos % BITS_PER_UNIT == 0
3950 /* If we have a non-zero bitpos for a register target, then we just
3951 let store_field do the bitfield handling. This is unlikely to
3952 generate unnecessary clear instructions anyway. */
3953 && (bitpos == 0 || GET_CODE (target) == MEM))
3955 if (bitpos != 0)
3956 target = change_address (target, VOIDmode,
3957 plus_constant (XEXP (target, 0),
3958 bitpos / BITS_PER_UNIT));
3959 store_constructor (exp, target, align, cleared);
3961 else
3962 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
3963 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
3964 int_size_in_bytes (type), cleared);
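/* Illustrative sketch (hypothetical names): a nested constructor that
   takes the recursive shortcut above.  The inner { 1, 2 } is itself a
   CONSTRUCTOR at a byte-aligned position, so it is handed straight back
   to store_constructor instead of going through store_field, and the
   CLEARED flag is propagated to it.

       struct inner { int a, b; };
       struct outer { struct inner in; int c; };

       void
       build (void)
       {
         struct outer o = { { 1, 2 }, 3 };
       }
*/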
3967 /* Store the value of constructor EXP into the rtx TARGET.
3968 TARGET is either a REG or a MEM.
3969 ALIGN is the maximum known alignment for TARGET, in bits.
3970 CLEARED is true if TARGET is known to have been zero'd. */
3972 static void
3973 store_constructor (exp, target, align, cleared)
3974 tree exp;
3975 rtx target;
3976 int align;
3977 int cleared;
3979 tree type = TREE_TYPE (exp);
3980 #ifdef WORD_REGISTER_OPERATIONS
3981 rtx exp_size = expr_size (exp);
3982 #endif
3984 /* We know our target cannot conflict, since safe_from_p has been called. */
3985 #if 0
3986 /* Don't try copying piece by piece into a hard register
3987 since that is vulnerable to being clobbered by EXP.
3988 Instead, construct in a pseudo register and then copy it all. */
3989 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3991 rtx temp = gen_reg_rtx (GET_MODE (target));
3992 store_constructor (exp, temp, 0);
3993 emit_move_insn (target, temp);
3994 return;
3996 #endif
3998 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3999 || TREE_CODE (type) == QUAL_UNION_TYPE)
4001 register tree elt;
4003 /* Inform later passes that the whole union value is dead. */
4004 if (TREE_CODE (type) == UNION_TYPE
4005 || TREE_CODE (type) == QUAL_UNION_TYPE)
4006 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4008 /* If we are building a static constructor into a register,
4009 set the initial value as zero so we can fold the value into
4010 a constant. But if more than one register is involved,
4011 this probably loses. */
4012 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4013 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4015 if (! cleared)
4016 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4018 cleared = 1;
4021 /* If the constructor has fewer fields than the structure
4022 or if we are initializing the structure to mostly zeros,
4023 clear the whole structure first. */
4024 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4025 != list_length (TYPE_FIELDS (type)))
4026 || mostly_zeros_p (exp))
4028 if (! cleared)
4029 clear_storage (target, expr_size (exp),
4030 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4032 cleared = 1;
4034 else
4035 /* Inform later passes that the old value is dead. */
4036 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4038 /* Store each element of the constructor into
4039 the corresponding field of TARGET. */
4041 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4043 register tree field = TREE_PURPOSE (elt);
4044 #ifdef WORD_REGISTER_OPERATIONS
4045 tree value = TREE_VALUE (elt);
4046 #endif
4047 register enum machine_mode mode;
4048 int bitsize;
4049 int bitpos = 0;
4050 int unsignedp;
4051 tree pos, constant = 0, offset = 0;
4052 rtx to_rtx = target;
4054 /* Just ignore missing fields.
4055 We cleared the whole structure, above,
4056 if any fields are missing. */
4057 if (field == 0)
4058 continue;
4060 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4061 continue;
4063 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4064 unsignedp = TREE_UNSIGNED (field);
4065 mode = DECL_MODE (field);
4066 if (DECL_BIT_FIELD (field))
4067 mode = VOIDmode;
4069 pos = DECL_FIELD_BITPOS (field);
4070 if (TREE_CODE (pos) == INTEGER_CST)
4071 constant = pos;
4072 else if (TREE_CODE (pos) == PLUS_EXPR
4073 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4074 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4075 else
4076 offset = pos;
4078 if (constant)
4079 bitpos = TREE_INT_CST_LOW (constant);
4081 if (offset)
4083 rtx offset_rtx;
4085 if (contains_placeholder_p (offset))
4086 offset = build (WITH_RECORD_EXPR, sizetype,
4087 offset, make_tree (TREE_TYPE (exp), target));
4089 offset = size_binop (FLOOR_DIV_EXPR, offset,
4090 size_int (BITS_PER_UNIT));
4092 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4093 if (GET_CODE (to_rtx) != MEM)
4094 abort ();
4096 if (GET_MODE (offset_rtx) != ptr_mode)
4098 #ifdef POINTERS_EXTEND_UNSIGNED
4099 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4100 #else
4101 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4102 #endif
4105 to_rtx
4106 = change_address (to_rtx, VOIDmode,
4107 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4108 force_reg (ptr_mode,
4109 offset_rtx)));
4112 if (TREE_READONLY (field))
4114 if (GET_CODE (to_rtx) == MEM)
4115 to_rtx = copy_rtx (to_rtx);
4117 RTX_UNCHANGING_P (to_rtx) = 1;
4120 #ifdef WORD_REGISTER_OPERATIONS
4121 /* If this initializes a field that is smaller than a word, at the
4122 start of a word, try to widen it to a full word.
4123 This special case allows us to output C++ member function
4124 initializations in a form that the optimizers can understand. */
4125 if (constant
4126 && GET_CODE (target) == REG
4127 && bitsize < BITS_PER_WORD
4128 && bitpos % BITS_PER_WORD == 0
4129 && GET_MODE_CLASS (mode) == MODE_INT
4130 && TREE_CODE (value) == INTEGER_CST
4131 && GET_CODE (exp_size) == CONST_INT
4132 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4134 tree type = TREE_TYPE (value);
4135 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4137 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4138 value = convert (type, value);
4140 if (BYTES_BIG_ENDIAN)
4141 value
4142 = fold (build (LSHIFT_EXPR, type, value,
4143 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4144 bitsize = BITS_PER_WORD;
4145 mode = word_mode;
4147 #endif
4148 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4149 TREE_VALUE (elt), type,
4150 MIN (align,
4151 DECL_ALIGN (TREE_PURPOSE (elt))),
4152 cleared);
4155 else if (TREE_CODE (type) == ARRAY_TYPE)
4157 register tree elt;
4158 register int i;
4159 int need_to_clear;
4160 tree domain = TYPE_DOMAIN (type);
4161 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4162 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4163 tree elttype = TREE_TYPE (type);
4165 /* If the constructor has fewer elements than the array,
4166 clear the whole array first. Similarly if this is
4167 a static constructor of a non-BLKmode object. */
4168 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4169 need_to_clear = 1;
4170 else
4172 HOST_WIDE_INT count = 0, zero_count = 0;
4173 need_to_clear = 0;
4174 /* This loop is a more accurate version of the loop in
4175 mostly_zeros_p (it handles RANGE_EXPR in an index).
4176 It is also needed to check for missing elements. */
4177 for (elt = CONSTRUCTOR_ELTS (exp);
4178 elt != NULL_TREE;
4179 elt = TREE_CHAIN (elt))
4181 tree index = TREE_PURPOSE (elt);
4182 HOST_WIDE_INT this_node_count;
4183 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4185 tree lo_index = TREE_OPERAND (index, 0);
4186 tree hi_index = TREE_OPERAND (index, 1);
4187 if (TREE_CODE (lo_index) != INTEGER_CST
4188 || TREE_CODE (hi_index) != INTEGER_CST)
4190 need_to_clear = 1;
4191 break;
4193 this_node_count = TREE_INT_CST_LOW (hi_index)
4194 - TREE_INT_CST_LOW (lo_index) + 1;
4196 else
4197 this_node_count = 1;
4198 count += this_node_count;
4199 if (mostly_zeros_p (TREE_VALUE (elt)))
4200 zero_count += this_node_count;
4202 /* Clear the entire array first if there are any missing elements,
4203 or if the incidence of zero elements is >= 75%. */
4204 if (count < maxelt - minelt + 1
4205 || 4 * zero_count >= 3 * count)
4206 need_to_clear = 1;
4208 if (need_to_clear)
4210 if (! cleared)
4211 clear_storage (target, expr_size (exp),
4212 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4213 cleared = 1;
4215 else
4216 /* Inform later passes that the old value is dead. */
4217 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4219 /* Store each element of the constructor into
4220 the corresponding element of TARGET, determined
4221 by counting the elements. */
4222 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4223 elt;
4224 elt = TREE_CHAIN (elt), i++)
4226 register enum machine_mode mode;
4227 int bitsize;
4228 int bitpos;
4229 int unsignedp;
4230 tree value = TREE_VALUE (elt);
4231 int align = TYPE_ALIGN (TREE_TYPE (value));
4232 tree index = TREE_PURPOSE (elt);
4233 rtx xtarget = target;
4235 if (cleared && is_zeros_p (value))
4236 continue;
4238 mode = TYPE_MODE (elttype);
4239 bitsize = GET_MODE_BITSIZE (mode);
4240 unsignedp = TREE_UNSIGNED (elttype);
4242 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4244 tree lo_index = TREE_OPERAND (index, 0);
4245 tree hi_index = TREE_OPERAND (index, 1);
4246 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4247 struct nesting *loop;
4248 HOST_WIDE_INT lo, hi, count;
4249 tree position;
4251 /* If the range is constant and "small", unroll the loop. */
4252 if (TREE_CODE (lo_index) == INTEGER_CST
4253 && TREE_CODE (hi_index) == INTEGER_CST
4254 && (lo = TREE_INT_CST_LOW (lo_index),
4255 hi = TREE_INT_CST_LOW (hi_index),
4256 count = hi - lo + 1,
4257 (GET_CODE (target) != MEM
4258 || count <= 2
4259 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4260 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4261 <= 40 * 8))))
4263 lo -= minelt; hi -= minelt;
4264 for (; lo <= hi; lo++)
4266 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4267 store_constructor_field (target, bitsize, bitpos, mode,
4268 value, type, align, cleared);
4271 else
4273 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4274 loop_top = gen_label_rtx ();
4275 loop_end = gen_label_rtx ();
4277 unsignedp = TREE_UNSIGNED (domain);
4279 index = build_decl (VAR_DECL, NULL_TREE, domain);
4281 DECL_RTL (index) = index_r
4282 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4283 &unsignedp, 0));
4285 if (TREE_CODE (value) == SAVE_EXPR
4286 && SAVE_EXPR_RTL (value) == 0)
4288 /* Make sure value gets expanded once before the
4289 loop. */
4290 expand_expr (value, const0_rtx, VOIDmode, 0);
4291 emit_queue ();
4293 store_expr (lo_index, index_r, 0);
4294 loop = expand_start_loop (0);
4296 /* Assign value to element index. */
4297 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4298 size_int (BITS_PER_UNIT));
4299 position = size_binop (MULT_EXPR,
4300 size_binop (MINUS_EXPR, index,
4301 TYPE_MIN_VALUE (domain)),
4302 position);
4303 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4304 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4305 xtarget = change_address (target, mode, addr);
4306 if (TREE_CODE (value) == CONSTRUCTOR)
4307 store_constructor (value, xtarget, align, cleared);
4308 else
4309 store_expr (value, xtarget, 0);
4311 expand_exit_loop_if_false (loop,
4312 build (LT_EXPR, integer_type_node,
4313 index, hi_index));
4315 expand_increment (build (PREINCREMENT_EXPR,
4316 TREE_TYPE (index),
4317 index, integer_one_node), 0, 0);
4318 expand_end_loop ();
4319 emit_label (loop_end);
4321 /* Needed by stupid register allocation, to extend the
4322 lifetime of pseudo-regs used by target past the end
4323 of the loop. */
4324 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4327 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4328 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4330 rtx pos_rtx, addr;
4331 tree position;
4333 if (index == 0)
4334 index = size_int (i);
4336 if (minelt)
4337 index = size_binop (MINUS_EXPR, index,
4338 TYPE_MIN_VALUE (domain));
4339 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4340 size_int (BITS_PER_UNIT));
4341 position = size_binop (MULT_EXPR, index, position);
4342 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4343 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4344 xtarget = change_address (target, mode, addr);
4345 store_expr (value, xtarget, 0);
4347 else
4349 if (index != 0)
4350 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4351 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4352 else
4353 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4354 store_constructor_field (target, bitsize, bitpos, mode, value,
4355 type, align, cleared);
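/* Illustrative sketch (hypothetical names; assumes a 32-bit int): GNU C
   range designators produce the RANGE_EXPR indices handled above.  With
   the 40 * 8 bit threshold used there, the first initializer is small
   enough for the range to be unrolled into individual stores, while the
   second is expanded as a runtime loop over an index pseudo.

       void
       fill_tabs (void)
       {
         int small_tab[4]   = { [0 ... 3]   = 5 };
         int large_tab[500] = { [0 ... 499] = 5 };
       }
*/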
4359 /* set constructor assignments */
4360 else if (TREE_CODE (type) == SET_TYPE)
4362 tree elt = CONSTRUCTOR_ELTS (exp);
4363 int nbytes = int_size_in_bytes (type), nbits;
4364 tree domain = TYPE_DOMAIN (type);
4365 tree domain_min, domain_max, bitlength;
4367 /* The default implementation strategy is to extract the constant
4368 parts of the constructor, use that to initialize the target,
4369 and then "or" in whatever non-constant ranges we need in addition.
4371 If a large set is all zero or all ones, it is
4372 probably better to set it using memset (if available) or bzero.
4373 Also, if a large set has just a single range, it may also be
4374 better to first clear the whole set (using
4375 bzero/memset), and then set the bits we want. */
4377 /* Check for all zeros. */
4378 if (elt == NULL_TREE)
4380 if (!cleared)
4381 clear_storage (target, expr_size (exp),
4382 TYPE_ALIGN (type) / BITS_PER_UNIT);
4383 return;
4386 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4387 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4388 bitlength = size_binop (PLUS_EXPR,
4389 size_binop (MINUS_EXPR, domain_max, domain_min),
4390 size_one_node);
4392 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4393 abort ();
4394 nbits = TREE_INT_CST_LOW (bitlength);
4396 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4397 are "complicated" (more than one range), initialize (the
4398 constant parts) by copying from a constant. */
4399 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4400 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4402 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4403 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4404 char *bit_buffer = (char *) alloca (nbits);
4405 HOST_WIDE_INT word = 0;
4406 int bit_pos = 0;
4407 int ibit = 0;
4408 int offset = 0; /* In bytes from beginning of set. */
4409 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4410 for (;;)
4412 if (bit_buffer[ibit])
4414 if (BYTES_BIG_ENDIAN)
4415 word |= (1 << (set_word_size - 1 - bit_pos));
4416 else
4417 word |= 1 << bit_pos;
4419 bit_pos++; ibit++;
4420 if (bit_pos >= set_word_size || ibit == nbits)
4422 if (word != 0 || ! cleared)
4424 rtx datum = GEN_INT (word);
4425 rtx to_rtx;
4426 /* The assumption here is that it is safe to use
4427 XEXP if the set is multi-word, but not if
4428 it's single-word. */
4429 if (GET_CODE (target) == MEM)
4431 to_rtx = plus_constant (XEXP (target, 0), offset);
4432 to_rtx = change_address (target, mode, to_rtx);
4434 else if (offset == 0)
4435 to_rtx = target;
4436 else
4437 abort ();
4438 emit_move_insn (to_rtx, datum);
4440 if (ibit == nbits)
4441 break;
4442 word = 0;
4443 bit_pos = 0;
4444 offset += set_word_size / BITS_PER_UNIT;
4448 else if (!cleared)
4450 /* Don't bother clearing storage if the set is all ones. */
4451 if (TREE_CHAIN (elt) != NULL_TREE
4452 || (TREE_PURPOSE (elt) == NULL_TREE
4453 ? nbits != 1
4454 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4455 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4456 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4457 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4458 != nbits))))
4459 clear_storage (target, expr_size (exp),
4460 TYPE_ALIGN (type) / BITS_PER_UNIT);
4463 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4465 /* start of range of element or NULL */
4466 tree startbit = TREE_PURPOSE (elt);
4467 /* end of range of element, or element value */
4468 tree endbit = TREE_VALUE (elt);
4469 #ifdef TARGET_MEM_FUNCTIONS
4470 HOST_WIDE_INT startb, endb;
4471 #endif
4472 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4474 bitlength_rtx = expand_expr (bitlength,
4475 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4477 /* handle non-range tuple element like [ expr ] */
4478 if (startbit == NULL_TREE)
4480 startbit = save_expr (endbit);
4481 endbit = startbit;
4483 startbit = convert (sizetype, startbit);
4484 endbit = convert (sizetype, endbit);
4485 if (! integer_zerop (domain_min))
4487 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4488 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4490 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4491 EXPAND_CONST_ADDRESS);
4492 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4493 EXPAND_CONST_ADDRESS);
4495 if (REG_P (target))
4497 targetx = assign_stack_temp (GET_MODE (target),
4498 GET_MODE_SIZE (GET_MODE (target)),
4500 emit_move_insn (targetx, target);
4502 else if (GET_CODE (target) == MEM)
4503 targetx = target;
4504 else
4505 abort ();
4507 #ifdef TARGET_MEM_FUNCTIONS
4508 /* Optimization: If startbit and endbit are
4509 constants divisible by BITS_PER_UNIT,
4510 call memset instead. */
4511 if (TREE_CODE (startbit) == INTEGER_CST
4512 && TREE_CODE (endbit) == INTEGER_CST
4513 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4514 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4516 emit_library_call (memset_libfunc, 0,
4517 VOIDmode, 3,
4518 plus_constant (XEXP (targetx, 0),
4519 startb / BITS_PER_UNIT),
4520 Pmode,
4521 constm1_rtx, TYPE_MODE (integer_type_node),
4522 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4523 TYPE_MODE (sizetype));
4525 else
4526 #endif
4528 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4529 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4530 bitlength_rtx, TYPE_MODE (sizetype),
4531 startbit_rtx, TYPE_MODE (sizetype),
4532 endbit_rtx, TYPE_MODE (sizetype));
4534 if (REG_P (target))
4535 emit_move_insn (target, targetx);
4539 else
4540 abort ();
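/* Illustrative sketch (hypothetical names): an aggregate initializer with
   fewer elements than the record has fields.  Because the constructor
   lists only two of the six fields, the code above calls clear_storage
   for the whole object first and then stores just the two explicit
   values.

       struct many { int a, b, c, d, e, f; };

       void
       mk (void)
       {
         struct many m = { 1, 2 };
       }
*/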
4543 /* Store the value of EXP (an expression tree)
4544 into a subfield of TARGET which has mode MODE and occupies
4545 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4546 If MODE is VOIDmode, it means that we are storing into a bit-field.
4548 If VALUE_MODE is VOIDmode, return nothing in particular.
4549 UNSIGNEDP is not used in this case.
4551 Otherwise, return an rtx for the value stored. This rtx
4552 has mode VALUE_MODE if that is convenient to do.
4553 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4555 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4556 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4558 ALIAS_SET is the alias set for the destination. This value will
4559 (in general) be different from that for TARGET, since TARGET is a
4560 reference to the containing structure. */
4562 static rtx
4563 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4564 unsignedp, align, total_size, alias_set)
4565 rtx target;
4566 int bitsize, bitpos;
4567 enum machine_mode mode;
4568 tree exp;
4569 enum machine_mode value_mode;
4570 int unsignedp;
4571 int align;
4572 int total_size;
4573 int alias_set;
4575 HOST_WIDE_INT width_mask = 0;
4577 if (TREE_CODE (exp) == ERROR_MARK)
4578 return const0_rtx;
4580 if (bitsize < HOST_BITS_PER_WIDE_INT)
4581 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4583 /* If we are storing into an unaligned field of an aligned union that is
4584 in a register, we may have the mode of TARGET being an integer mode but
4585 MODE == BLKmode. In that case, get an aligned object whose size and
4586 alignment are the same as TARGET and store TARGET into it (we can avoid
4587 the store if the field being stored is the entire width of TARGET). Then
4588 call ourselves recursively to store the field into a BLKmode version of
4589 that object. Finally, load from the object into TARGET. This is not
4590 very efficient in general, but should only be slightly more expensive
4591 than the otherwise-required unaligned accesses. Perhaps this can be
4592 cleaned up later. */
4594 if (mode == BLKmode
4595 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4597 rtx object = assign_stack_temp (GET_MODE (target),
4598 GET_MODE_SIZE (GET_MODE (target)), 0);
4599 rtx blk_object = copy_rtx (object);
4601 MEM_SET_IN_STRUCT_P (object, 1);
4602 MEM_SET_IN_STRUCT_P (blk_object, 1);
4603 PUT_MODE (blk_object, BLKmode);
4605 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4606 emit_move_insn (object, target);
4608 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4609 align, total_size, alias_set);
4611 /* Even though we aren't returning target, we need to
4612 give it the updated value. */
4613 emit_move_insn (target, object);
4615 return blk_object;
4618 /* If the structure is in a register or if the component
4619 is a bit field, we cannot use addressing to access it.
4620 Use bit-field techniques or SUBREG to store in it. */
4622 if (mode == VOIDmode
4623 || (mode != BLKmode && ! direct_store[(int) mode]
4624 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4625 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4626 || GET_CODE (target) == REG
4627 || GET_CODE (target) == SUBREG
4628 /* If the field isn't aligned enough to store as an ordinary memref,
4629 store it as a bit field. */
4630 || (SLOW_UNALIGNED_ACCESS
4631 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4632 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4634 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4636 /* If BITSIZE is narrower than the size of the type of EXP
4637 we will be narrowing TEMP. Normally, what's wanted are the
4638 low-order bits. However, if EXP's type is a record and this is
4639 a big-endian machine, we want the upper BITSIZE bits. */
4640 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4641 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4642 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4643 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4644 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4645 - bitsize),
4646 temp, 1);
4648 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4649 MODE. */
4650 if (mode != VOIDmode && mode != BLKmode
4651 && mode != TYPE_MODE (TREE_TYPE (exp)))
4652 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4654 /* If the modes of TARGET and TEMP are both BLKmode, both
4655 must be in memory and BITPOS must be aligned on a byte
4656 boundary. If so, we simply do a block copy. */
4657 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4659 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4660 || bitpos % BITS_PER_UNIT != 0)
4661 abort ();
4663 target = change_address (target, VOIDmode,
4664 plus_constant (XEXP (target, 0),
4665 bitpos / BITS_PER_UNIT));
4667 emit_block_move (target, temp,
4668 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4669 / BITS_PER_UNIT),
4672 return value_mode == VOIDmode ? const0_rtx : target;
4675 /* Store the value in the bitfield. */
4676 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4677 if (value_mode != VOIDmode)
4679 /* The caller wants an rtx for the value. */
4680 /* If possible, avoid refetching from the bitfield itself. */
4681 if (width_mask != 0
4682 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4684 tree count;
4685 enum machine_mode tmode;
4687 if (unsignedp)
4688 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4689 tmode = GET_MODE (temp);
4690 if (tmode == VOIDmode)
4691 tmode = value_mode;
4692 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4693 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4694 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4696 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4697 NULL_RTX, value_mode, 0, align,
4698 total_size);
4700 return const0_rtx;
4702 else
4704 rtx addr = XEXP (target, 0);
4705 rtx to_rtx;
4707 /* If a value is wanted, it must be the lhs;
4708 so make the address stable for multiple use. */
4710 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4711 && ! CONSTANT_ADDRESS_P (addr)
4712 /* A frame-pointer reference is already stable. */
4713 && ! (GET_CODE (addr) == PLUS
4714 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4715 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4716 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4717 addr = copy_to_reg (addr);
4719 /* Now build a reference to just the desired component. */
4721 to_rtx = copy_rtx (change_address (target, mode,
4722 plus_constant (addr,
4723 (bitpos
4724 / BITS_PER_UNIT))));
4725 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4726 MEM_ALIAS_SET (to_rtx) = alias_set;
4728 return store_expr (exp, to_rtx, value_mode != VOIDmode);
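/* Illustrative sketch (hypothetical names): an assignment to a bit-field
   member, which is one way store_field is reached with MODE == VOIDmode.
   For f->mode below, BITSIZE is 3 and, on a typical layout, BITPOS is 1,
   so the store goes through store_bit_field rather than an ordinary
   memory reference.

       struct flags
       {
         unsigned int ready : 1;
         unsigned int mode  : 3;
         unsigned int count : 12;
       };

       void
       set_mode (struct flags *f)
       {
         f->mode = 5;
       }
*/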
4732 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4733 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4734 ARRAY_REFs and find the ultimate containing object, which we return.
4736 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4737 bit position, and *PUNSIGNEDP to the signedness of the field.
4738 If the position of the field is variable, we store a tree
4739 giving the variable offset (in units) in *POFFSET.
4740 This offset is in addition to the bit position.
4741 If the position is not variable, we store 0 in *POFFSET.
4742 We set *PALIGNMENT to the alignment in bytes of the address that will be
4743 computed. This is the alignment of the thing we return if *POFFSET
4744 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4746 If any of the extraction expressions is volatile,
4747 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4749 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4750 is a mode that can be used to access the field. In that case, *PBITSIZE
4751 is redundant.
4753 If the field describes a variable-sized object, *PMODE is set to
4754 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4755 this case, but the address of the object can be found. */
4757 tree
4758 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4759 punsignedp, pvolatilep, palignment)
4760 tree exp;
4761 int *pbitsize;
4762 int *pbitpos;
4763 tree *poffset;
4764 enum machine_mode *pmode;
4765 int *punsignedp;
4766 int *pvolatilep;
4767 int *palignment;
4769 tree orig_exp = exp;
4770 tree size_tree = 0;
4771 enum machine_mode mode = VOIDmode;
4772 tree offset = integer_zero_node;
4773 unsigned int alignment = BIGGEST_ALIGNMENT;
4775 if (TREE_CODE (exp) == COMPONENT_REF)
4777 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4778 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4779 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4780 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4782 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4784 size_tree = TREE_OPERAND (exp, 1);
4785 *punsignedp = TREE_UNSIGNED (exp);
4787 else
4789 mode = TYPE_MODE (TREE_TYPE (exp));
4790 if (mode == BLKmode)
4791 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4793 *pbitsize = GET_MODE_BITSIZE (mode);
4794 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4797 if (size_tree)
4799 if (TREE_CODE (size_tree) != INTEGER_CST)
4800 mode = BLKmode, *pbitsize = -1;
4801 else
4802 *pbitsize = TREE_INT_CST_LOW (size_tree);
4805 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4806 and find the ultimate containing object. */
4808 *pbitpos = 0;
4810 while (1)
4812 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4814 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4815 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4816 : TREE_OPERAND (exp, 2));
4817 tree constant = integer_zero_node, var = pos;
4819 /* If this field hasn't been filled in yet, don't go
4820 past it. This should only happen when folding expressions
4821 made during type construction. */
4822 if (pos == 0)
4823 break;
4825 /* Assume here that the offset is a multiple of a unit.
4826 If not, there should be an explicitly added constant. */
4827 if (TREE_CODE (pos) == PLUS_EXPR
4828 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4829 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4830 else if (TREE_CODE (pos) == INTEGER_CST)
4831 constant = pos, var = integer_zero_node;
4833 *pbitpos += TREE_INT_CST_LOW (constant);
4834 offset = size_binop (PLUS_EXPR, offset,
4835 size_binop (EXACT_DIV_EXPR, var,
4836 size_int (BITS_PER_UNIT)));
4839 else if (TREE_CODE (exp) == ARRAY_REF)
4841 /* This code is based on the code in case ARRAY_REF in expand_expr
4842 below. We assume here that the size of an array element is
4843 always an integral multiple of BITS_PER_UNIT. */
4845 tree index = TREE_OPERAND (exp, 1);
4846 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4847 tree low_bound
4848 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4849 tree index_type = TREE_TYPE (index);
4850 tree xindex;
4852 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4854 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4855 index);
4856 index_type = TREE_TYPE (index);
4859 /* Optimize the special-case of a zero lower bound.
4861 We convert the low_bound to sizetype to avoid some problems
4862 with constant folding. (E.g. suppose the lower bound is 1,
4863 and its mode is QI. Without the conversion, (ARRAY
4864 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4865 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4867 But sizetype isn't quite right either (especially if
4868 the lowbound is negative). FIXME */
4870 if (! integer_zerop (low_bound))
4871 index = fold (build (MINUS_EXPR, index_type, index,
4872 convert (sizetype, low_bound)));
4874 if (TREE_CODE (index) == INTEGER_CST)
4876 index = convert (sbitsizetype, index);
4877 index_type = TREE_TYPE (index);
4880 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4881 convert (sbitsizetype,
4882 TYPE_SIZE (TREE_TYPE (exp)))));
4884 if (TREE_CODE (xindex) == INTEGER_CST
4885 && TREE_INT_CST_HIGH (xindex) == 0)
4886 *pbitpos += TREE_INT_CST_LOW (xindex);
4887 else
4889 /* Either the bit offset calculated above is not constant, or
4890 it overflowed. In either case, redo the multiplication
4891 against the size in units. This is especially important
4892 in the non-constant case to avoid a division at runtime. */
4893 xindex = fold (build (MULT_EXPR, ssizetype, index,
4894 convert (ssizetype,
4895 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4897 if (contains_placeholder_p (xindex))
4898 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4900 offset = size_binop (PLUS_EXPR, offset, xindex);
4903 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4904 && ! ((TREE_CODE (exp) == NOP_EXPR
4905 || TREE_CODE (exp) == CONVERT_EXPR)
4906 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4907 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4908 != UNION_TYPE))
4909 && (TYPE_MODE (TREE_TYPE (exp))
4910 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4911 break;
4913 /* If any reference in the chain is volatile, the effect is volatile. */
4914 if (TREE_THIS_VOLATILE (exp))
4915 *pvolatilep = 1;
4917 /* If the offset is non-constant already, then we can't assume any
4918 alignment more than the alignment here. */
4919 if (! integer_zerop (offset))
4920 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4922 exp = TREE_OPERAND (exp, 0);
4925 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4926 alignment = MIN (alignment, DECL_ALIGN (exp));
4927 else if (TREE_TYPE (exp) != 0)
4928 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4930 if (integer_zerop (offset))
4931 offset = 0;
4933 if (offset != 0 && contains_placeholder_p (offset))
4934 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4936 *pmode = mode;
4937 *poffset = offset;
4938 *palignment = alignment / BITS_PER_UNIT;
4939 return exp;
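/* Illustrative sketch (hypothetical names) of what get_inner_reference
   computes for a nested reference.  For b->table[i].val below it peels
   the COMPONENT_REF and ARRAY_REF nodes: *PBITPOS collects the constant
   part (the offset of `table' within struct box plus the offset of `val'
   within struct elt, in bits), *POFFSET holds the variable part, roughly
   i * sizeof (struct elt) in units, and the returned tree is the
   INDIRECT_REF *b.

       struct elt { int tag; double val; };
       struct box { int hdr; struct elt table[8]; };

       double
       get_val (struct box *b, int i)
       {
         return b->table[i].val;
       }
*/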
4942 /* Subroutine of expand_expr: compute memory_usage from modifier. */
4943 static enum memory_use_mode
4944 get_memory_usage_from_modifier (modifier)
4945 enum expand_modifier modifier;
4947 switch (modifier)
4949 case EXPAND_NORMAL:
4950 case EXPAND_SUM:
4951 return MEMORY_USE_RO;
4952 break;
4953 case EXPAND_MEMORY_USE_WO:
4954 return MEMORY_USE_WO;
4955 break;
4956 case EXPAND_MEMORY_USE_RW:
4957 return MEMORY_USE_RW;
4958 break;
4959 case EXPAND_MEMORY_USE_DONT:
4960 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4961 MEMORY_USE_DONT, because they are modifiers to a call of
4962 expand_expr in the ADDR_EXPR case of expand_expr. */
4963 case EXPAND_CONST_ADDRESS:
4964 case EXPAND_INITIALIZER:
4965 return MEMORY_USE_DONT;
4966 case EXPAND_MEMORY_USE_BAD:
4967 default:
4968 abort ();
4972 /* Given an rtx VALUE that may contain additions and multiplications,
4973 return an equivalent value that just refers to a register or memory.
4974 This is done by generating instructions to perform the arithmetic
4975 and returning a pseudo-register containing the value.
4977 The returned value may be a REG, SUBREG, MEM or constant. */
4980 force_operand (value, target)
4981 rtx value, target;
4983 register optab binoptab = 0;
4984 /* Use a temporary to force order of execution of calls to
4985 `force_operand'. */
4986 rtx tmp;
4987 register rtx op2;
4988 /* Use subtarget as the target for operand 0 of a binary operation. */
4989 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4991 /* Check for a PIC address load. */
4992 if (flag_pic
4993 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4994 && XEXP (value, 0) == pic_offset_table_rtx
4995 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4996 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4997 || GET_CODE (XEXP (value, 1)) == CONST))
4999 if (!subtarget)
5000 subtarget = gen_reg_rtx (GET_MODE (value));
5001 emit_move_insn (subtarget, value);
5002 return subtarget;
5005 if (GET_CODE (value) == PLUS)
5006 binoptab = add_optab;
5007 else if (GET_CODE (value) == MINUS)
5008 binoptab = sub_optab;
5009 else if (GET_CODE (value) == MULT)
5011 op2 = XEXP (value, 1);
5012 if (!CONSTANT_P (op2)
5013 && !(GET_CODE (op2) == REG && op2 != subtarget))
5014 subtarget = 0;
5015 tmp = force_operand (XEXP (value, 0), subtarget);
5016 return expand_mult (GET_MODE (value), tmp,
5017 force_operand (op2, NULL_RTX),
5018 target, 0);
5021 if (binoptab)
5023 op2 = XEXP (value, 1);
5024 if (!CONSTANT_P (op2)
5025 && !(GET_CODE (op2) == REG && op2 != subtarget))
5026 subtarget = 0;
5027 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5029 binoptab = add_optab;
5030 op2 = negate_rtx (GET_MODE (value), op2);
5033 /* Check for an addition with OP2 a constant integer and our first
5034 operand a PLUS of a virtual register and something else. In that
5035 case, we want to emit the sum of the virtual register and the
5036 constant first and then add the other value. This allows virtual
5037 register instantiation to simply modify the constant rather than
5038 creating another one around this addition. */
5039 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5040 && GET_CODE (XEXP (value, 0)) == PLUS
5041 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5042 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5043 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5045 rtx temp = expand_binop (GET_MODE (value), binoptab,
5046 XEXP (XEXP (value, 0), 0), op2,
5047 subtarget, 0, OPTAB_LIB_WIDEN);
5048 return expand_binop (GET_MODE (value), binoptab, temp,
5049 force_operand (XEXP (XEXP (value, 0), 1), 0),
5050 target, 0, OPTAB_LIB_WIDEN);
5053 tmp = force_operand (XEXP (value, 0), subtarget);
5054 return expand_binop (GET_MODE (value), binoptab, tmp,
5055 force_operand (op2, NULL_RTX),
5056 target, 0, OPTAB_LIB_WIDEN);
5057 /* We give UNSIGNEDP = 0 to expand_binop
5058 because the only operations we are expanding here are signed ones. */
5060 return value;
5063 /* Subroutine of expand_expr:
5064 save the non-copied parts (LIST) of an expr (LHS), and return a list
5065 which can restore these values to their previous values,
5066 should something modify their storage. */
5068 static tree
5069 save_noncopied_parts (lhs, list)
5070 tree lhs;
5071 tree list;
5073 tree tail;
5074 tree parts = 0;
5076 for (tail = list; tail; tail = TREE_CHAIN (tail))
5077 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5078 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5079 else
5081 tree part = TREE_VALUE (tail);
5082 tree part_type = TREE_TYPE (part);
5083 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5084 rtx target = assign_temp (part_type, 0, 1, 1);
5085 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5086 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5087 parts = tree_cons (to_be_saved,
5088 build (RTL_EXPR, part_type, NULL_TREE,
5089 (tree) target),
5090 parts);
5091 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5093 return parts;
5096 /* Subroutine of expand_expr:
5097 record the non-copied parts (LIST) of an expr (LHS), and return a list
5098 which specifies the initial values of these parts. */
5100 static tree
5101 init_noncopied_parts (lhs, list)
5102 tree lhs;
5103 tree list;
5105 tree tail;
5106 tree parts = 0;
5108 for (tail = list; tail; tail = TREE_CHAIN (tail))
5109 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5110 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5111 else if (TREE_PURPOSE (tail))
5113 tree part = TREE_VALUE (tail);
5114 tree part_type = TREE_TYPE (part);
5115 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5116 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5118 return parts;
5121 /* Subroutine of expand_expr: return nonzero iff there is no way that
5122 EXP can reference X, which is being modified. TOP_P is nonzero if this
5123 call is going to be used to determine whether we need a temporary
5124 for EXP, as opposed to a recursive call to this function.
5126 It is always safe for this routine to return zero since it merely
5127 searches for optimization opportunities. */
5129 static int
5130 safe_from_p (x, exp, top_p)
5131 rtx x;
5132 tree exp;
5133 int top_p;
5135 rtx exp_rtl = 0;
5136 int i, nops;
5137 static int save_expr_count;
5138 static int save_expr_size = 0;
5139 static tree *save_expr_rewritten;
5140 static tree save_expr_trees[256];
5142 if (x == 0
5143 /* If EXP has varying size, we MUST use a target since we currently
5144 have no way of allocating temporaries of variable size
5145 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5146 So we assume here that something at a higher level has prevented a
5147 clash. This is somewhat bogus, but the best we can do. Only
5148 do this when X is BLKmode and when we are at the top level. */
5149 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5150 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5151 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5152 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5153 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5154 != INTEGER_CST)
5155 && GET_MODE (x) == BLKmode))
5156 return 1;
5158 if (top_p && save_expr_size == 0)
5160 int rtn;
5162 save_expr_count = 0;
5163 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5164 save_expr_rewritten = &save_expr_trees[0];
5166 rtn = safe_from_p (x, exp, 1);
5168 for (i = 0; i < save_expr_count; ++i)
5170 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5171 abort ();
5172 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5175 save_expr_size = 0;
5177 return rtn;
5180 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5181 find the underlying pseudo. */
5182 if (GET_CODE (x) == SUBREG)
5184 x = SUBREG_REG (x);
5185 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5186 return 0;
5189 /* If X is a location in the outgoing argument area, it is always safe. */
5190 if (GET_CODE (x) == MEM
5191 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5192 || (GET_CODE (XEXP (x, 0)) == PLUS
5193 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5194 return 1;
5196 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5198 case 'd':
5199 exp_rtl = DECL_RTL (exp);
5200 break;
5202 case 'c':
5203 return 1;
5205 case 'x':
5206 if (TREE_CODE (exp) == TREE_LIST)
5207 return ((TREE_VALUE (exp) == 0
5208 || safe_from_p (x, TREE_VALUE (exp), 0))
5209 && (TREE_CHAIN (exp) == 0
5210 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5211 else if (TREE_CODE (exp) == ERROR_MARK)
5212 return 1; /* An already-visited SAVE_EXPR? */
5213 else
5214 return 0;
5216 case '1':
5217 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5219 case '2':
5220 case '<':
5221 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5222 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5224 case 'e':
5225 case 'r':
5226 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5227 the expression. If it is set, we conflict iff we are that rtx or
5228 both are in memory. Otherwise, we check all operands of the
5229 expression recursively. */
5231 switch (TREE_CODE (exp))
5233 case ADDR_EXPR:
5234 return (staticp (TREE_OPERAND (exp, 0))
5235 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5236 || TREE_STATIC (exp));
5238 case INDIRECT_REF:
5239 if (GET_CODE (x) == MEM)
5240 return 0;
5241 break;
5243 case CALL_EXPR:
5244 exp_rtl = CALL_EXPR_RTL (exp);
5245 if (exp_rtl == 0)
5247 /* Assume that the call will clobber all hard registers and
5248 all of memory. */
5249 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5250 || GET_CODE (x) == MEM)
5251 return 0;
5254 break;
5256 case RTL_EXPR:
5257 /* If a sequence exists, we would have to scan every instruction
5258 in the sequence to see if it was safe. This is probably not
5259 worthwhile. */
5260 if (RTL_EXPR_SEQUENCE (exp))
5261 return 0;
5263 exp_rtl = RTL_EXPR_RTL (exp);
5264 break;
5266 case WITH_CLEANUP_EXPR:
5267 exp_rtl = RTL_EXPR_RTL (exp);
5268 break;
5270 case CLEANUP_POINT_EXPR:
5271 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5273 case SAVE_EXPR:
5274 exp_rtl = SAVE_EXPR_RTL (exp);
5275 if (exp_rtl)
5276 break;
5278 /* This SAVE_EXPR might appear many times in the top-level
5279 safe_from_p() expression, and if it has a complex
5280 subexpression, examining it multiple times could result
5281 in a combinatorial explosion. E.g. on an Alpha
5282 running at least 200MHz, a Fortran test case compiled with
5283 optimization took about 28 minutes to compile -- even though
5284 it was only a few lines long, and the complicated line causing
5285 so much time to be spent in the earlier version of safe_from_p()
5286 had only 293 or so unique nodes.
5288 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5289 where it is so we can turn it back in the top-level safe_from_p()
5290 when we're done. */
5292 /* For now, don't bother re-sizing the array. */
5293 if (save_expr_count >= save_expr_size)
5294 return 0;
5295 save_expr_rewritten[save_expr_count++] = exp;
5297 nops = tree_code_length[(int) SAVE_EXPR];
5298 for (i = 0; i < nops; i++)
5300 tree operand = TREE_OPERAND (exp, i);
5301 if (operand == NULL_TREE)
5302 continue;
5303 TREE_SET_CODE (exp, ERROR_MARK);
5304 if (!safe_from_p (x, operand, 0))
5305 return 0;
5306 TREE_SET_CODE (exp, SAVE_EXPR);
5308 TREE_SET_CODE (exp, ERROR_MARK);
5309 return 1;
5311 case BIND_EXPR:
5312 /* The only operand we look at is operand 1. The rest aren't
5313 part of the expression. */
5314 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5316 case METHOD_CALL_EXPR:
5317 /* This takes an rtx argument, but shouldn't appear here. */
5318 abort ();
5320 default:
5321 break;
5324 /* If we have an rtx, we do not need to scan our operands. */
5325 if (exp_rtl)
5326 break;
5328 nops = tree_code_length[(int) TREE_CODE (exp)];
5329 for (i = 0; i < nops; i++)
5330 if (TREE_OPERAND (exp, i) != 0
5331 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5332 return 0;
5335 /* If we have an rtl, find any enclosed object. Then see if we conflict
5336 with it. */
5337 if (exp_rtl)
5339 if (GET_CODE (exp_rtl) == SUBREG)
5341 exp_rtl = SUBREG_REG (exp_rtl);
5342 if (GET_CODE (exp_rtl) == REG
5343 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5344 return 0;
5347 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5348 are memory and EXP is not readonly. */
5349 return ! (rtx_equal_p (x, exp_rtl)
5350 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5351 && ! TREE_READONLY (exp)));
5354 /* If we reach here, it is safe. */
5355 return 1;
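/* Illustrative sketch (hypothetical names) of the predicate's
   conservatism.  For `x = 3 * 7' the right-hand side is a constant, so
   safe_from_p returns 1 and X itself can serve as the target while the
   RHS is expanded.  For `x = foo () + 1' the CALL_EXPR is assumed to
   clobber all of memory, so when X is a MEM safe_from_p returns 0 and a
   caller using it to decide whether EXP needs a temporary will use one.

       extern int foo (void);
       int x;

       void
       g (void)
       {
         x = 3 * 7;
         x = foo () + 1;
       }
*/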
5358 /* Subroutine of expand_expr: return nonzero iff EXP is an
5359 expression whose type is statically determinable. */
5361 static int
5362 fixed_type_p (exp)
5363 tree exp;
5365 if (TREE_CODE (exp) == PARM_DECL
5366 || TREE_CODE (exp) == VAR_DECL
5367 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5368 || TREE_CODE (exp) == COMPONENT_REF
5369 || TREE_CODE (exp) == ARRAY_REF)
5370 return 1;
5371 return 0;
5374 /* Subroutine of expand_expr: return rtx if EXP is a
5375 variable or parameter; else return 0. */
5377 static rtx
5378 var_rtx (exp)
5379 tree exp;
5381 STRIP_NOPS (exp);
5382 switch (TREE_CODE (exp))
5384 case PARM_DECL:
5385 case VAR_DECL:
5386 return DECL_RTL (exp);
5387 default:
5388 return 0;
5392 #ifdef MAX_INTEGER_COMPUTATION_MODE
5393 void
5394 check_max_integer_computation_mode (exp)
5395 tree exp;
5397 enum tree_code code;
5398 enum machine_mode mode;
5400 /* Strip any NOPs that don't change the mode. */
5401 STRIP_NOPS (exp);
5402 code = TREE_CODE (exp);
5404 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5405 if (code == NOP_EXPR
5406 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5407 return;
5409 /* First check the type of the overall operation. We need only look at
5410 unary, binary and relational operations. */
5411 if (TREE_CODE_CLASS (code) == '1'
5412 || TREE_CODE_CLASS (code) == '2'
5413 || TREE_CODE_CLASS (code) == '<')
5415 mode = TYPE_MODE (TREE_TYPE (exp));
5416 if (GET_MODE_CLASS (mode) == MODE_INT
5417 && mode > MAX_INTEGER_COMPUTATION_MODE)
5418 fatal ("unsupported wide integer operation");
5421 /* Check operand of a unary op. */
5422 if (TREE_CODE_CLASS (code) == '1')
5424 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5425 if (GET_MODE_CLASS (mode) == MODE_INT
5426 && mode > MAX_INTEGER_COMPUTATION_MODE)
5427 fatal ("unsupported wide integer operation");
5430 /* Check operands of a binary/comparison op. */
5431 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5433 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5434 if (GET_MODE_CLASS (mode) == MODE_INT
5435 && mode > MAX_INTEGER_COMPUTATION_MODE)
5436 fatal ("unsupported wide integer operation");
5438 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5439 if (GET_MODE_CLASS (mode) == MODE_INT
5440 && mode > MAX_INTEGER_COMPUTATION_MODE)
5441 fatal ("unsupported wide integer operation");
5444 #endif
5447 /* expand_expr: generate code for computing expression EXP.
5448 An rtx for the computed value is returned. The value is never null.
5449 In the case of a void EXP, const0_rtx is returned.
5451 The value may be stored in TARGET if TARGET is nonzero.
5452 TARGET is just a suggestion; callers must assume that
5453 the rtx returned may not be the same as TARGET.
5455 If TARGET is CONST0_RTX, it means that the value will be ignored.
5457 If TMODE is not VOIDmode, it suggests generating the
5458 result in mode TMODE. But this is done only when convenient.
5459 Otherwise, TMODE is ignored and the value generated in its natural mode.
5460 TMODE is just a suggestion; callers must assume that
5461 the rtx returned may not have mode TMODE.
5463 Note that TARGET may have neither TMODE nor MODE. In that case, it
5464 probably will not be used.
5466 If MODIFIER is EXPAND_SUM then when EXP is an addition
5467 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5468 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5469 products as above, or REG or MEM, or constant.
5470 Ordinarily in such cases we would output mul or add instructions
5471 and then return a pseudo reg containing the sum.
5473 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5474 it also marks a label as absolutely required (it can't be dead).
5475 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5476 This is used for outputting expressions used in initializers.
5478 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5479 with a constant address even if that address is not normally legitimate.
5480 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5483 expand_expr (exp, target, tmode, modifier)
5484 register tree exp;
5485 rtx target;
5486 enum machine_mode tmode;
5487 enum expand_modifier modifier;
5489 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5490 This is static so it will be accessible to our recursive callees. */
5491 static tree placeholder_list = 0;
5492 register rtx op0, op1, temp;
5493 tree type = TREE_TYPE (exp);
5494 int unsignedp = TREE_UNSIGNED (type);
5495 register enum machine_mode mode;
5496 register enum tree_code code = TREE_CODE (exp);
5497 optab this_optab;
5498 rtx subtarget, original_target;
5499 int ignore;
5500 tree context;
5501 /* Used by check-memory-usage to make modifier read only. */
5502 enum expand_modifier ro_modifier;
5504 /* Handle ERROR_MARK before anybody tries to access its type. */
5505 if (TREE_CODE (exp) == ERROR_MARK)
5507 op0 = CONST0_RTX (tmode);
5508 if (op0 != 0)
5509 return op0;
5510 return const0_rtx;
5513 mode = TYPE_MODE (type);
5514 /* Use subtarget as the target for operand 0 of a binary operation. */
5515 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5516 original_target = target;
5517 ignore = (target == const0_rtx
5518 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5519 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5520 || code == COND_EXPR)
5521 && TREE_CODE (type) == VOID_TYPE));
5523 /* Make a read-only version of the modifier. */
5524 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5525 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5526 ro_modifier = modifier;
5527 else
5528 ro_modifier = EXPAND_NORMAL;
5530 /* Don't use hard regs as subtargets, because the combiner
5531 can only handle pseudo regs. */
5532 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5533 subtarget = 0;
5534 /* Avoid subtargets inside loops,
5535 since they hide some invariant expressions. */
5536 if (preserve_subexpressions_p ())
5537 subtarget = 0;
5539 /* If we are going to ignore this result, we need only do something
5540 if there is a side-effect somewhere in the expression. If there
5541 is, short-circuit the most common cases here. Note that we must
5542 not call expand_expr with anything but const0_rtx in case this
5543 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5545 if (ignore)
5547 if (! TREE_SIDE_EFFECTS (exp))
5548 return const0_rtx;
5550 /* Ensure we reference a volatile object even if value is ignored. */
5551 if (TREE_THIS_VOLATILE (exp)
5552 && TREE_CODE (exp) != FUNCTION_DECL
5553 && mode != VOIDmode && mode != BLKmode)
5555 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5556 if (GET_CODE (temp) == MEM)
5557 temp = copy_to_reg (temp);
5558 return const0_rtx;
5561 if (TREE_CODE_CLASS (code) == '1')
5562 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5563 VOIDmode, ro_modifier);
5564 else if (TREE_CODE_CLASS (code) == '2'
5565 || TREE_CODE_CLASS (code) == '<')
5567 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5568 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5569 return const0_rtx;
5571 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5572 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5573 /* If the second operand has no side effects, just evaluate
5574 the first. */
5575 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5576 VOIDmode, ro_modifier);
5578 target = 0;
5581 #ifdef MAX_INTEGER_COMPUTATION_MODE
5582 /* Only check stuff here if the mode we want is different from the mode
5583 of the expression; if it's the same, check_max_integer_computation_mode
5584 will handle it. Do we really need to check this stuff at all? */
5586 if (target
5587 && GET_MODE (target) != mode
5588 && TREE_CODE (exp) != INTEGER_CST
5589 && TREE_CODE (exp) != PARM_DECL
5590 && TREE_CODE (exp) != ARRAY_REF
5591 && TREE_CODE (exp) != COMPONENT_REF
5592 && TREE_CODE (exp) != BIT_FIELD_REF
5593 && TREE_CODE (exp) != INDIRECT_REF
5594 && TREE_CODE (exp) != CALL_EXPR
5595 && TREE_CODE (exp) != VAR_DECL
5596 && TREE_CODE (exp) != RTL_EXPR)
5598 enum machine_mode mode = GET_MODE (target);
5600 if (GET_MODE_CLASS (mode) == MODE_INT
5601 && mode > MAX_INTEGER_COMPUTATION_MODE)
5602 fatal ("unsupported wide integer operation");
5605 if (tmode != mode
5606 && TREE_CODE (exp) != INTEGER_CST
5607 && TREE_CODE (exp) != PARM_DECL
5608 && TREE_CODE (exp) != ARRAY_REF
5609 && TREE_CODE (exp) != COMPONENT_REF
5610 && TREE_CODE (exp) != BIT_FIELD_REF
5611 && TREE_CODE (exp) != INDIRECT_REF
5612 && TREE_CODE (exp) != VAR_DECL
5613 && TREE_CODE (exp) != CALL_EXPR
5614 && TREE_CODE (exp) != RTL_EXPR
5615 && GET_MODE_CLASS (tmode) == MODE_INT
5616 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5617 fatal ("unsupported wide integer operation");
5619 check_max_integer_computation_mode (exp);
5620 #endif
5622 /* If will do cse, generate all results into pseudo registers
5623 since 1) that allows cse to find more things
5624 and 2) otherwise cse could produce an insn the machine
5625 cannot support. */
5627 if (! cse_not_expected && mode != BLKmode && target
5628 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5629 target = subtarget;
5631 switch (code)
5633 case LABEL_DECL:
5635 tree function = decl_function_context (exp);
5636 /* Handle using a label in a containing function. */
5637 if (function != current_function_decl
5638 && function != inline_function_decl && function != 0)
5640 struct function *p = find_function_data (function);
5641 /* Allocate in the memory associated with the function
5642 that the label is in. */
5643 push_obstacks (p->function_obstack,
5644 p->function_maybepermanent_obstack);
5646 p->expr->x_forced_labels
5647 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5648 p->expr->x_forced_labels);
5649 pop_obstacks ();
5651 else
5653 if (modifier == EXPAND_INITIALIZER)
5654 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5655 label_rtx (exp),
5656 forced_labels);
5659 temp = gen_rtx_MEM (FUNCTION_MODE,
5660 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5661 if (function != current_function_decl
5662 && function != inline_function_decl && function != 0)
5663 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5664 return temp;
5667 case PARM_DECL:
5668 if (DECL_RTL (exp) == 0)
5670 error_with_decl (exp, "prior parameter's size depends on `%s'");
5671 return CONST0_RTX (mode);
5674 /* ... fall through ... */
5676 case VAR_DECL:
5677 /* If a static var's type was incomplete when the decl was written,
5678 but the type is complete now, lay out the decl now. */
5679 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5680 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5682 push_obstacks_nochange ();
5683 end_temporary_allocation ();
5684 layout_decl (exp, 0);
5685 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5686 pop_obstacks ();
5689 /* Although static-storage variables start off initialized, according to
5690 ANSI C, a memcpy could overwrite them with uninitialized values. So
5691 we check them too. This also lets us check for read-only variables
5692 accessed via a non-const declaration, in case it won't be detected
5693 any other way (e.g., in an embedded system or OS kernel without
5694 memory protection).
5696 Aggregates are not checked here; they're handled elsewhere. */
5697 if (current_function && current_function_check_memory_usage
5698 && code == VAR_DECL
5699 && GET_CODE (DECL_RTL (exp)) == MEM
5700 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5702 enum memory_use_mode memory_usage;
5703 memory_usage = get_memory_usage_from_modifier (modifier);
5705 if (memory_usage != MEMORY_USE_DONT)
5706 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5707 XEXP (DECL_RTL (exp), 0), Pmode,
5708 GEN_INT (int_size_in_bytes (type)),
5709 TYPE_MODE (sizetype),
5710 GEN_INT (memory_usage),
5711 TYPE_MODE (integer_type_node));
5714 /* ... fall through ... */
5716 case FUNCTION_DECL:
5717 case RESULT_DECL:
5718 if (DECL_RTL (exp) == 0)
5719 abort ();
5721 /* Ensure the variable is marked as used even if it doesn't go through
5722 a parser. If it hasn't been used yet, write out an external
5723 definition. */
5724 if (! TREE_USED (exp))
5726 assemble_external (exp);
5727 TREE_USED (exp) = 1;
5730 /* Show we haven't gotten RTL for this yet. */
5731 temp = 0;
5733 /* Handle variables inherited from containing functions. */
5734 context = decl_function_context (exp);
5736 /* We treat inline_function_decl as an alias for the current function
5737 because that is the inline function whose vars, types, etc.
5738 are being merged into the current function.
5739 See expand_inline_function. */
5741 if (context != 0 && context != current_function_decl
5742 && context != inline_function_decl
5743 /* If var is static, we don't need a static chain to access it. */
5744 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5745 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5747 rtx addr;
5749 /* Mark as non-local and addressable. */
5750 DECL_NONLOCAL (exp) = 1;
5751 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5752 abort ();
5753 mark_addressable (exp);
5754 if (GET_CODE (DECL_RTL (exp)) != MEM)
5755 abort ();
5756 addr = XEXP (DECL_RTL (exp), 0);
5757 if (GET_CODE (addr) == MEM)
5758 addr = gen_rtx_MEM (Pmode,
5759 fix_lexical_addr (XEXP (addr, 0), exp));
5760 else
5761 addr = fix_lexical_addr (addr, exp);
5762 temp = change_address (DECL_RTL (exp), mode, addr);
5765 /* This is the case of an array whose size is to be determined
5766 from its initializer, while the initializer is still being parsed.
5767 See expand_decl. */
5769 else if (GET_CODE (DECL_RTL (exp)) == MEM
5770 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5771 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5772 XEXP (DECL_RTL (exp), 0));
5774 /* If DECL_RTL is memory, we are in the normal case and either
5775 the address is not valid or it is not a register and -fforce-addr
5776 is specified, get the address into a register. */
5778 else if (GET_CODE (DECL_RTL (exp)) == MEM
5779 && modifier != EXPAND_CONST_ADDRESS
5780 && modifier != EXPAND_SUM
5781 && modifier != EXPAND_INITIALIZER
5782 && (! memory_address_p (DECL_MODE (exp),
5783 XEXP (DECL_RTL (exp), 0))
5784 || (flag_force_addr
5785 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5786 temp = change_address (DECL_RTL (exp), VOIDmode,
5787 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5789 /* If we got something, return it. But first, set the alignment
5790 if the address is a register. */
5791 if (temp != 0)
5793 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5794 mark_reg_pointer (XEXP (temp, 0),
5795 DECL_ALIGN (exp) / BITS_PER_UNIT);
5797 return temp;
5800 /* If the mode of DECL_RTL does not match that of the decl, it
5801 must be a promoted value. We return a SUBREG of the wanted mode,
5802 but mark it so that we know that it was already extended. */
5804 if (GET_CODE (DECL_RTL (exp)) == REG
5805 && GET_MODE (DECL_RTL (exp)) != mode)
5807 /* Get the signedness used for this variable. Ensure we get the
5808 same mode we got when the variable was declared. */
5809 if (GET_MODE (DECL_RTL (exp))
5810 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5811 abort ();
5813 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5814 SUBREG_PROMOTED_VAR_P (temp) = 1;
5815 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5816 return temp;
5819 return DECL_RTL (exp);
5821 case INTEGER_CST:
5822 return immed_double_const (TREE_INT_CST_LOW (exp),
5823 TREE_INT_CST_HIGH (exp),
5824 mode);
5826 case CONST_DECL:
5827 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5828 EXPAND_MEMORY_USE_BAD);
5830 case REAL_CST:
5831 /* If optimized, generate immediate CONST_DOUBLE
5832 which will be turned into memory by reload if necessary.
5834 We used to force a register so that loop.c could see it. But
5835 this does not allow gen_* patterns to perform optimizations with
5836 the constants. It also produces two insns in cases like "x = 1.0;".
5837 On most machines, floating-point constants are not permitted in
5838 many insns, so we'd end up copying it to a register in any case.
5840 Now, we do the copying in expand_binop, if appropriate. */
5841 return immed_real_const (exp);
5843 case COMPLEX_CST:
5844 case STRING_CST:
5845 if (! TREE_CST_RTL (exp))
5846 output_constant_def (exp);
5848 /* TREE_CST_RTL probably contains a constant address.
5849 On RISC machines where a constant address isn't valid,
5850 make some insns to get that address into a register. */
5851 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5852 && modifier != EXPAND_CONST_ADDRESS
5853 && modifier != EXPAND_INITIALIZER
5854 && modifier != EXPAND_SUM
5855 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5856 || (flag_force_addr
5857 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5858 return change_address (TREE_CST_RTL (exp), VOIDmode,
5859 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5860 return TREE_CST_RTL (exp);
5862 case EXPR_WITH_FILE_LOCATION:
5864 rtx to_return;
5865 char *saved_input_filename = input_filename;
5866 int saved_lineno = lineno;
5867 input_filename = EXPR_WFL_FILENAME (exp);
5868 lineno = EXPR_WFL_LINENO (exp);
5869 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5870 emit_line_note (input_filename, lineno);
5871 /* Possibly avoid switching back and forth here.  */
5872 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5873 input_filename = saved_input_filename;
5874 lineno = saved_lineno;
5875 return to_return;
5878 case SAVE_EXPR:
5879 context = decl_function_context (exp);
5881 /* If this SAVE_EXPR was at global context, assume we are an
5882 initialization function and move it into our context. */
5883 if (context == 0)
5884 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5886 /* We treat inline_function_decl as an alias for the current function
5887 because that is the inline function whose vars, types, etc.
5888 are being merged into the current function.
5889 See expand_inline_function. */
5890 if (context == current_function_decl || context == inline_function_decl)
5891 context = 0;
5893 /* If this is non-local, handle it. */
5894 if (context)
5896 /* The following call just exists to abort if the context is
5897 not of a containing function. */
5898 find_function_data (context);
5900 temp = SAVE_EXPR_RTL (exp);
5901 if (temp && GET_CODE (temp) == REG)
5903 put_var_into_stack (exp);
5904 temp = SAVE_EXPR_RTL (exp);
5906 if (temp == 0 || GET_CODE (temp) != MEM)
5907 abort ();
5908 return change_address (temp, mode,
5909 fix_lexical_addr (XEXP (temp, 0), exp));
5911 if (SAVE_EXPR_RTL (exp) == 0)
5913 if (mode == VOIDmode)
5914 temp = const0_rtx;
5915 else
5916 temp = assign_temp (type, 3, 0, 0);
5918 SAVE_EXPR_RTL (exp) = temp;
5919 if (!optimize && GET_CODE (temp) == REG)
5920 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5921 save_expr_regs);
5923 /* If the mode of TEMP does not match that of the expression, it
5924 must be a promoted value. We pass store_expr a SUBREG of the
5925 wanted mode but mark it so that we know that it was already
5926 extended. Note that `unsignedp' was modified above in
5927 this case. */
5929 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5931 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5932 SUBREG_PROMOTED_VAR_P (temp) = 1;
5933 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5936 if (temp == const0_rtx)
5937 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5938 EXPAND_MEMORY_USE_BAD);
5939 else
5940 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5942 TREE_USED (exp) = 1;
5945 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5946 must be a promoted value. We return a SUBREG of the wanted mode,
5947 but mark it so that we know that it was already extended. */
5949 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5950 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5952 /* Compute the signedness and make the proper SUBREG. */
5953 promote_mode (type, mode, &unsignedp, 0);
5954 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5955 SUBREG_PROMOTED_VAR_P (temp) = 1;
5956 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5957 return temp;
5960 return SAVE_EXPR_RTL (exp);
5962 case UNSAVE_EXPR:
5964 rtx temp;
5965 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5966 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5967 return temp;
5970 case PLACEHOLDER_EXPR:
5972 tree placeholder_expr;
5974 /* If there is an object on the head of the placeholder list,
5975 see if some object in it is of type TYPE or a pointer to it. For
5976 further information, see tree.def. */
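/* Hedged illustration (an assumption about typical front-end usage, not
   taken from this file): a self-referential type such as Ada's

       type Rec (Len : Natural) is record
          Data : String (1 .. Len);
       end record;

   has a size expression containing a PLACEHOLDER_EXPR that stands for
   "the record object".  A WITH_RECORD_EXPR (handled just below) pushes a
   concrete object onto PLACEHOLDER_LIST; when the PLACEHOLDER_EXPR is
   reached here, an object of the wanted type (or a pointer to one) is
   looked up on that list and expanded in its place.  */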
5977 for (placeholder_expr = placeholder_list;
5978 placeholder_expr != 0;
5979 placeholder_expr = TREE_CHAIN (placeholder_expr))
5981 tree need_type = TYPE_MAIN_VARIANT (type);
5982 tree object = 0;
5983 tree old_list = placeholder_list;
5984 tree elt;
5986 /* Find the outermost reference that is of the type we want.
5987 If none, see if any object has a type that is a pointer to
5988 the type we want. */
5989 for (elt = TREE_PURPOSE (placeholder_expr);
5990 elt != 0 && object == 0;
5992 = ((TREE_CODE (elt) == COMPOUND_EXPR
5993 || TREE_CODE (elt) == COND_EXPR)
5994 ? TREE_OPERAND (elt, 1)
5995 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5996 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5997 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5998 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5999 ? TREE_OPERAND (elt, 0) : 0))
6000 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6001 object = elt;
6003 for (elt = TREE_PURPOSE (placeholder_expr);
6004 elt != 0 && object == 0;
6006 = ((TREE_CODE (elt) == COMPOUND_EXPR
6007 || TREE_CODE (elt) == COND_EXPR)
6008 ? TREE_OPERAND (elt, 1)
6009 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6010 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6011 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6012 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6013 ? TREE_OPERAND (elt, 0) : 0))
6014 if (POINTER_TYPE_P (TREE_TYPE (elt))
6015 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6016 == need_type))
6017 object = build1 (INDIRECT_REF, need_type, elt);
6019 if (object != 0)
6021 /* Expand this object skipping the list entries before
6022 it was found in case it is also a PLACEHOLDER_EXPR.
6023 In that case, we want to translate it using subsequent
6024 entries. */
6025 placeholder_list = TREE_CHAIN (placeholder_expr);
6026 temp = expand_expr (object, original_target, tmode,
6027 ro_modifier);
6028 placeholder_list = old_list;
6029 return temp;
6034 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6035 abort ();
6037 case WITH_RECORD_EXPR:
6038 /* Put the object on the placeholder list, expand our first operand,
6039 and pop the list. */
6040 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6041 placeholder_list);
6042 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6043 tmode, ro_modifier);
6044 placeholder_list = TREE_CHAIN (placeholder_list);
6045 return target;
6047 case GOTO_EXPR:
6048 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6049 expand_goto (TREE_OPERAND (exp, 0));
6050 else
6051 expand_computed_goto (TREE_OPERAND (exp, 0));
6052 return const0_rtx;
6054 case EXIT_EXPR:
6055 expand_exit_loop_if_false (NULL_PTR,
6056 invert_truthvalue (TREE_OPERAND (exp, 0)));
6057 return const0_rtx;
6059 case LABELED_BLOCK_EXPR:
6060 if (LABELED_BLOCK_BODY (exp))
6061 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6062 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6063 return const0_rtx;
6065 case EXIT_BLOCK_EXPR:
6066 if (EXIT_BLOCK_RETURN (exp))
6067 sorry ("returned value in block_exit_expr");
6068 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6069 return const0_rtx;
6071 case LOOP_EXPR:
6072 push_temp_slots ();
6073 expand_start_loop (1);
6074 expand_expr_stmt (TREE_OPERAND (exp, 0));
6075 expand_end_loop ();
6076 pop_temp_slots ();
6078 return const0_rtx;
6080 case BIND_EXPR:
6082 tree vars = TREE_OPERAND (exp, 0);
6083 int vars_need_expansion = 0;
6085 /* Need to open a binding contour here because
6086 if there are any cleanups they must be contained here. */
6087 expand_start_bindings (0);
6089 /* Mark the corresponding BLOCK for output in its proper place. */
6090 if (TREE_OPERAND (exp, 2) != 0
6091 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6092 insert_block (TREE_OPERAND (exp, 2));
6094 /* If VARS have not yet been expanded, expand them now. */
6095 while (vars)
6097 if (DECL_RTL (vars) == 0)
6099 vars_need_expansion = 1;
6100 expand_decl (vars);
6102 expand_decl_init (vars);
6103 vars = TREE_CHAIN (vars);
6106 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6108 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6110 return temp;
6113 case RTL_EXPR:
6114 if (RTL_EXPR_SEQUENCE (exp))
6116 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6117 abort ();
6118 emit_insns (RTL_EXPR_SEQUENCE (exp));
6119 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6121 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6122 free_temps_for_rtl_expr (exp);
6123 return RTL_EXPR_RTL (exp);
6125 case CONSTRUCTOR:
6126 /* If we don't need the result, just ensure we evaluate any
6127 subexpressions. */
6128 if (ignore)
6130 tree elt;
6131 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6132 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6133 EXPAND_MEMORY_USE_BAD);
6134 return const0_rtx;
6137 /* All elts simple constants => refer to a constant in memory. But
6138 if this is a non-BLKmode mode, let it store a field at a time
6139 since that should make a CONST_INT or CONST_DOUBLE when we
6140 fold. Likewise, if we have a target we can use, it is best to
6141 store directly into the target unless the type is large enough
6142 that memcpy will be used. If we are making an initializer and
6143 all operands are constant, put it in memory as well. */
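/* Rough illustration (assumed examples, not from this file): a static
   BLKmode aggregate such as

       static int tbl[4] = { 1, 2, 3, 4 };

   is referenced as a constant in memory via output_constant_def below,
   whereas a small constructor whose type has a scalar mode (e.g. a
   two-character struct given HImode) is built up by store_constructor a
   field at a time, so folding can turn it into a CONST_INT.  */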
6144 else if ((TREE_STATIC (exp)
6145 && ((mode == BLKmode
6146 && ! (target != 0 && safe_from_p (target, exp, 1)))
6147 || TREE_ADDRESSABLE (exp)
6148 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6149 && (!MOVE_BY_PIECES_P
6150 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6151 TYPE_ALIGN (type) / BITS_PER_UNIT))
6152 && ! mostly_zeros_p (exp))))
6153 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6155 rtx constructor = output_constant_def (exp);
6156 if (modifier != EXPAND_CONST_ADDRESS
6157 && modifier != EXPAND_INITIALIZER
6158 && modifier != EXPAND_SUM
6159 && (! memory_address_p (GET_MODE (constructor),
6160 XEXP (constructor, 0))
6161 || (flag_force_addr
6162 && GET_CODE (XEXP (constructor, 0)) != REG)))
6163 constructor = change_address (constructor, VOIDmode,
6164 XEXP (constructor, 0));
6165 return constructor;
6168 else
6170 /* Handle calls that pass values in multiple non-contiguous
6171 locations. The Irix 6 ABI has examples of this. */
6172 if (target == 0 || ! safe_from_p (target, exp, 1)
6173 || GET_CODE (target) == PARALLEL)
6175 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6176 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6177 else
6178 target = assign_temp (type, 0, 1, 1);
6181 if (TREE_READONLY (exp))
6183 if (GET_CODE (target) == MEM)
6184 target = copy_rtx (target);
6186 RTX_UNCHANGING_P (target) = 1;
6189 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0);
6190 return target;
6193 case INDIRECT_REF:
6195 tree exp1 = TREE_OPERAND (exp, 0);
6196 tree exp2;
6197 tree index;
6198 tree string = string_constant (exp1, &index);
6199 int i;
6201 /* Try to optimize reads from const strings. */
6202 if (string
6203 && TREE_CODE (string) == STRING_CST
6204 && TREE_CODE (index) == INTEGER_CST
6205 && !TREE_INT_CST_HIGH (index)
6206 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6207 && GET_MODE_CLASS (mode) == MODE_INT
6208 && GET_MODE_SIZE (mode) == 1
6209 && modifier != EXPAND_MEMORY_USE_WO)
6210 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6212 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6213 op0 = memory_address (mode, op0);
6215 if (current_function && current_function_check_memory_usage
6216 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6218 enum memory_use_mode memory_usage;
6219 memory_usage = get_memory_usage_from_modifier (modifier);
6221 if (memory_usage != MEMORY_USE_DONT)
6223 in_check_memory_usage = 1;
6224 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6225 op0, Pmode,
6226 GEN_INT (int_size_in_bytes (type)),
6227 TYPE_MODE (sizetype),
6228 GEN_INT (memory_usage),
6229 TYPE_MODE (integer_type_node));
6230 in_check_memory_usage = 0;
6234 temp = gen_rtx_MEM (mode, op0);
6235 /* If address was computed by addition,
6236 mark this as an element of an aggregate. */
6237 if (TREE_CODE (exp1) == PLUS_EXPR
6238 || (TREE_CODE (exp1) == SAVE_EXPR
6239 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6240 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6241 || (TREE_CODE (exp1) == ADDR_EXPR
6242 && (exp2 = TREE_OPERAND (exp1, 0))
6243 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6244 MEM_SET_IN_STRUCT_P (temp, 1);
6246 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6247 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6249 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6250 here, because, in C and C++, the fact that a location is accessed
6251 through a pointer to const does not mean that the value there can
6252 never change. Languages where it can never change should
6253 also set TREE_STATIC. */
6254 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6255 return temp;
6258 case ARRAY_REF:
6259 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6260 abort ();
6263 tree array = TREE_OPERAND (exp, 0);
6264 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6265 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6266 tree index = TREE_OPERAND (exp, 1);
6267 tree index_type = TREE_TYPE (index);
6268 HOST_WIDE_INT i;
6270 /* Optimize the special-case of a zero lower bound.
6272 We convert the low_bound to sizetype to avoid some problems
6273 with constant folding. (E.g. suppose the lower bound is 1,
6274 and its mode is QI. Without the conversion, (ARRAY
6275 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6276 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6278 But sizetype isn't quite right either (especially if
6279 the lowbound is negative). FIXME */
6281 if (! integer_zerop (low_bound))
6282 index = fold (build (MINUS_EXPR, index_type, index,
6283 convert (sizetype, low_bound)));
6285 /* Fold an expression like: "foo"[2].
6286 This is not done in fold so it won't happen inside &.
6287 Don't fold if this is for wide characters since it's too
6288 difficult to do correctly and this is a very rare case. */
6290 if (TREE_CODE (array) == STRING_CST
6291 && TREE_CODE (index) == INTEGER_CST
6292 && !TREE_INT_CST_HIGH (index)
6293 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6294 && GET_MODE_CLASS (mode) == MODE_INT
6295 && GET_MODE_SIZE (mode) == 1)
6296 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6298 /* If this is a constant index into a constant array,
6299 just get the value from the array. Handle both the cases when
6300 we have an explicit constructor and when our operand is a variable
6301 that was declared const. */
6303 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6305 if (TREE_CODE (index) == INTEGER_CST
6306 && TREE_INT_CST_HIGH (index) == 0)
6308 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6310 i = TREE_INT_CST_LOW (index);
6311 while (elem && i--)
6312 elem = TREE_CHAIN (elem);
6313 if (elem)
6314 return expand_expr (fold (TREE_VALUE (elem)), target,
6315 tmode, ro_modifier);
6319 else if (optimize >= 1
6320 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6321 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6322 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6324 if (TREE_CODE (index) == INTEGER_CST)
6326 tree init = DECL_INITIAL (array);
6328 i = TREE_INT_CST_LOW (index);
6329 if (TREE_CODE (init) == CONSTRUCTOR)
6331 tree elem = CONSTRUCTOR_ELTS (init);
6333 while (elem
6334 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6335 elem = TREE_CHAIN (elem);
6336 if (elem)
6337 return expand_expr (fold (TREE_VALUE (elem)), target,
6338 tmode, ro_modifier);
6340 else if (TREE_CODE (init) == STRING_CST
6341 && TREE_INT_CST_HIGH (index) == 0
6342 && (TREE_INT_CST_LOW (index)
6343 < TREE_STRING_LENGTH (init)))
6344 return (GEN_INT
6345 (TREE_STRING_POINTER
6346 (init)[TREE_INT_CST_LOW (index)]));
6351 /* ... fall through ... */
6353 case COMPONENT_REF:
6354 case BIT_FIELD_REF:
6355 /* If the operand is a CONSTRUCTOR, we can just extract the
6356 appropriate field if it is present. Don't do this if we have
6357 already written the data since we want to refer to that copy
6358 and varasm.c assumes that's what we'll do. */
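/* Illustrative sketch (hypothetical tree, not from this file): given

       (COMPONENT_REF (CONSTRUCTOR {.a = 1, .b = 2}) b)

   the loop below finds the constructor element whose TREE_PURPOSE is the
   FIELD_DECL `b' and expands its TREE_VALUE directly, masking or
   sign-extending first when `b' is a bit-field narrower than its mode.  */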
6359 if (code != ARRAY_REF
6360 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6361 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6363 tree elt;
6365 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6366 elt = TREE_CHAIN (elt))
6367 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6368 /* We can normally use the value of the field in the
6369 CONSTRUCTOR. However, if this is a bitfield in
6370 an integral mode that we can fit in a HOST_WIDE_INT,
6371 we must mask only the number of bits in the bitfield,
6372 since this is done implicitly by the constructor. If
6373 the bitfield does not meet either of those conditions,
6374 we can't do this optimization. */
6375 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6376 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6377 == MODE_INT)
6378 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6379 <= HOST_BITS_PER_WIDE_INT))))
6381 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6382 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6384 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6386 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6388 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6389 op0 = expand_and (op0, op1, target);
6391 else
6393 enum machine_mode imode
6394 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6395 tree count
6396 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6399 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6400 target, 0);
6401 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6402 target, 0);
6406 return op0;
6411 enum machine_mode mode1;
6412 int bitsize;
6413 int bitpos;
6414 tree offset;
6415 int volatilep = 0;
6416 int alignment;
6417 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6418 &mode1, &unsignedp, &volatilep,
6419 &alignment);
6421 /* If we got back the original object, something is wrong. Perhaps
6422 we are evaluating an expression too early. In any event, don't
6423 infinitely recurse. */
6424 if (tem == exp)
6425 abort ();
6427 /* If TEM's type is a union of variable size, pass TARGET to the inner
6428 computation, since it will need a temporary and TARGET is known
6429 to suffice. This occurs in unchecked conversion in Ada. */
6431 op0 = expand_expr (tem,
6432 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6433 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6434 != INTEGER_CST)
6435 ? target : NULL_RTX),
6436 VOIDmode,
6437 modifier == EXPAND_INITIALIZER
6438 ? modifier : EXPAND_NORMAL);
6440 /* If this is a constant, put it into a register if it is a
6441 legitimate constant and memory if it isn't. */
6442 if (CONSTANT_P (op0))
6444 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6445 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6446 op0 = force_reg (mode, op0);
6447 else
6448 op0 = validize_mem (force_const_mem (mode, op0));
6451 if (offset != 0)
6453 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6455 if (GET_CODE (op0) != MEM)
6456 abort ();
6458 if (GET_MODE (offset_rtx) != ptr_mode)
6460 #ifdef POINTERS_EXTEND_UNSIGNED
6461 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6462 #else
6463 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6464 #endif
6467 /* A constant address in OP0 can have VOIDmode; we must not try
6468 to call force_reg for that case, so avoid it. */
6469 if (GET_CODE (op0) == MEM
6470 && GET_MODE (op0) == BLKmode
6471 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6472 && bitsize
6473 && (bitpos % bitsize) == 0
6474 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6475 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6477 rtx temp = change_address (op0, mode1,
6478 plus_constant (XEXP (op0, 0),
6479 (bitpos /
6480 BITS_PER_UNIT)));
6481 if (GET_CODE (XEXP (temp, 0)) == REG)
6482 op0 = temp;
6483 else
6484 op0 = change_address (op0, mode1,
6485 force_reg (GET_MODE (XEXP (temp, 0)),
6486 XEXP (temp, 0)));
6487 bitpos = 0;
6491 op0 = change_address (op0, VOIDmode,
6492 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6493 force_reg (ptr_mode,
6494 offset_rtx)));
6497 /* Don't forget about volatility even if this is a bitfield. */
6498 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6500 op0 = copy_rtx (op0);
6501 MEM_VOLATILE_P (op0) = 1;
6504 /* Check the access. */
6505 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6507 enum memory_use_mode memory_usage;
6508 memory_usage = get_memory_usage_from_modifier (modifier);
6510 if (memory_usage != MEMORY_USE_DONT)
6512 rtx to;
6513 int size;
6515 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6516 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6518 /* Check the access right of the pointer. */
6519 if (size > BITS_PER_UNIT)
6520 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6521 to, Pmode,
6522 GEN_INT (size / BITS_PER_UNIT),
6523 TYPE_MODE (sizetype),
6524 GEN_INT (memory_usage),
6525 TYPE_MODE (integer_type_node));
6529 /* In cases where an aligned union has an unaligned object
6530 as a field, we might be extracting a BLKmode value from
6531 an integer-mode (e.g., SImode) object. Handle this case
6532 by doing the extract into an object as wide as the field
6533 (which we know to be the width of a basic mode), then
6534 storing into memory, and changing the mode to BLKmode.
6535 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6536 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6537 if (mode1 == VOIDmode
6538 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6539 || (modifier != EXPAND_CONST_ADDRESS
6540 && modifier != EXPAND_INITIALIZER
6541 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6542 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6543 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6544 /* If the field isn't aligned enough to fetch as a memref,
6545 fetch it as a bit field. */
6546 || (SLOW_UNALIGNED_ACCESS
6547 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6548 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6550 enum machine_mode ext_mode = mode;
6552 if (ext_mode == BLKmode)
6553 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6555 if (ext_mode == BLKmode)
6557 /* In this case, BITPOS must start at a byte boundary and
6558 TARGET, if specified, must be a MEM. */
6559 if (GET_CODE (op0) != MEM
6560 || (target != 0 && GET_CODE (target) != MEM)
6561 || bitpos % BITS_PER_UNIT != 0)
6562 abort ();
6564 op0 = change_address (op0, VOIDmode,
6565 plus_constant (XEXP (op0, 0),
6566 bitpos / BITS_PER_UNIT));
6567 if (target == 0)
6568 target = assign_temp (type, 0, 1, 1);
6570 emit_block_move (target, op0,
6571 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6572 / BITS_PER_UNIT),
6575 return target;
6578 op0 = validize_mem (op0);
6580 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6581 mark_reg_pointer (XEXP (op0, 0), alignment);
6583 op0 = extract_bit_field (op0, bitsize, bitpos,
6584 unsignedp, target, ext_mode, ext_mode,
6585 alignment,
6586 int_size_in_bytes (TREE_TYPE (tem)));
6588 /* If the result is a record type and BITSIZE is narrower than
6589 the mode of OP0, an integral mode, and this is a big endian
6590 machine, we must put the field into the high-order bits. */
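/* Worked example (assumed numbers): extracting an 8-bit record field may
   leave OP0 in SImode with the field in the low-order bits; on a
   big-endian machine the shift below moves it up by
   GET_MODE_BITSIZE (SImode) - 8 = 24 bits, so the field occupies the
   high-order byte, which is where the first bytes of the record live in
   memory on such a machine.  */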
6591 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6592 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6593 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6594 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6595 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6596 - bitsize),
6597 op0, 1);
6599 if (mode == BLKmode)
6601 rtx new = assign_stack_temp (ext_mode,
6602 bitsize / BITS_PER_UNIT, 0);
6604 emit_move_insn (new, op0);
6605 op0 = copy_rtx (new);
6606 PUT_MODE (op0, BLKmode);
6607 MEM_SET_IN_STRUCT_P (op0, 1);
6610 return op0;
6613 /* If the result is BLKmode, use that to access the object
6614 now as well. */
6615 if (mode == BLKmode)
6616 mode1 = BLKmode;
6618 /* Get a reference to just this component. */
6619 if (modifier == EXPAND_CONST_ADDRESS
6620 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6621 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6622 (bitpos / BITS_PER_UNIT)));
6623 else
6624 op0 = change_address (op0, mode1,
6625 plus_constant (XEXP (op0, 0),
6626 (bitpos / BITS_PER_UNIT)));
6628 if (GET_CODE (op0) == MEM)
6629 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6631 if (GET_CODE (XEXP (op0, 0)) == REG)
6632 mark_reg_pointer (XEXP (op0, 0), alignment);
6634 MEM_SET_IN_STRUCT_P (op0, 1);
6635 MEM_VOLATILE_P (op0) |= volatilep;
6636 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6637 || modifier == EXPAND_CONST_ADDRESS
6638 || modifier == EXPAND_INITIALIZER)
6639 return op0;
6640 else if (target == 0)
6641 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6643 convert_move (target, op0, unsignedp);
6644 return target;
6647 /* Intended for a reference to a buffer of a file-object in Pascal.
6648 But it's not certain that a special tree code will really be
6649 necessary for these. INDIRECT_REF might work for them. */
6650 case BUFFER_REF:
6651 abort ();
6653 case IN_EXPR:
6655 /* Pascal set IN expression.
6657 Algorithm:
6658 rlo = set_low - (set_low%bits_per_word);
6659 the_word = set [ (index - rlo)/bits_per_word ];
6660 bit_index = index % bits_per_word;
6661 bitmask = 1 << bit_index;
6662 return !!(the_word & bitmask); */
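/* Worked example of the algorithm above, assuming 8-bit units as the code
   below uses: with set_low = 3 and index = 13,

       rlo       = 3 - (3 % 8)       = 0
       the_word  = set[(13 - 0) / 8] = set[1]
       bit_index = 13 % 8            = 5
       bitmask   = 1 << 5            = 32

   so the result is nonzero iff bit 5 of the second byte is set.  */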
6664 tree set = TREE_OPERAND (exp, 0);
6665 tree index = TREE_OPERAND (exp, 1);
6666 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6667 tree set_type = TREE_TYPE (set);
6668 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6669 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6670 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6671 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6672 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6673 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6674 rtx setaddr = XEXP (setval, 0);
6675 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6676 rtx rlow;
6677 rtx diff, quo, rem, addr, bit, result;
6679 preexpand_calls (exp);
6681 /* If domain is empty, answer is no. Likewise if index is constant
6682 and out of bounds. */
6683 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6684 && TREE_CODE (set_low_bound) == INTEGER_CST
6685 && tree_int_cst_lt (set_high_bound, set_low_bound))
6686 || (TREE_CODE (index) == INTEGER_CST
6687 && TREE_CODE (set_low_bound) == INTEGER_CST
6688 && tree_int_cst_lt (index, set_low_bound))
6689 || (TREE_CODE (set_high_bound) == INTEGER_CST
6690 && TREE_CODE (index) == INTEGER_CST
6691 && tree_int_cst_lt (set_high_bound, index))))
6692 return const0_rtx;
6694 if (target == 0)
6695 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6697 /* If we get here, we have to generate the code for both cases
6698 (in range and out of range). */
6700 op0 = gen_label_rtx ();
6701 op1 = gen_label_rtx ();
6703 if (! (GET_CODE (index_val) == CONST_INT
6704 && GET_CODE (lo_r) == CONST_INT))
6706 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6707 GET_MODE (index_val), iunsignedp, 0, op1);
6710 if (! (GET_CODE (index_val) == CONST_INT
6711 && GET_CODE (hi_r) == CONST_INT))
6713 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6714 GET_MODE (index_val), iunsignedp, 0, op1);
6717 /* Calculate the element number of bit zero in the first word
6718 of the set. */
6719 if (GET_CODE (lo_r) == CONST_INT)
6720 rlow = GEN_INT (INTVAL (lo_r)
6721 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6722 else
6723 rlow = expand_binop (index_mode, and_optab, lo_r,
6724 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6725 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6727 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6728 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6730 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6731 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6732 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6733 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6735 addr = memory_address (byte_mode,
6736 expand_binop (index_mode, add_optab, diff,
6737 setaddr, NULL_RTX, iunsignedp,
6738 OPTAB_LIB_WIDEN));
6740 /* Extract the bit we want to examine.  */
6741 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6742 gen_rtx_MEM (byte_mode, addr),
6743 make_tree (TREE_TYPE (index), rem),
6744 NULL_RTX, 1);
6745 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6746 GET_MODE (target) == byte_mode ? target : 0,
6747 1, OPTAB_LIB_WIDEN);
6749 if (result != target)
6750 convert_move (target, result, 1);
6752 /* Output the code to handle the out-of-range case. */
6753 emit_jump (op0);
6754 emit_label (op1);
6755 emit_move_insn (target, const0_rtx);
6756 emit_label (op0);
6757 return target;
6760 case WITH_CLEANUP_EXPR:
6761 if (RTL_EXPR_RTL (exp) == 0)
6763 RTL_EXPR_RTL (exp)
6764 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6765 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6767 /* That's it for this cleanup. */
6768 TREE_OPERAND (exp, 2) = 0;
6770 return RTL_EXPR_RTL (exp);
6772 case CLEANUP_POINT_EXPR:
6774 /* Start a new binding layer that will keep track of all cleanup
6775 actions to be performed. */
6776 expand_start_bindings (0);
6778 target_temp_slot_level = temp_slot_level;
6780 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6781 /* If we're going to use this value, load it up now. */
6782 if (! ignore)
6783 op0 = force_not_mem (op0);
6784 preserve_temp_slots (op0);
6785 expand_end_bindings (NULL_TREE, 0, 0);
6787 return op0;
6789 case CALL_EXPR:
6790 /* Check for a built-in function. */
6791 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6792 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6793 == FUNCTION_DECL)
6794 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6795 return expand_builtin (exp, target, subtarget, tmode, ignore);
6797 /* If this call was expanded already by preexpand_calls,
6798 just return the result we got. */
6799 if (CALL_EXPR_RTL (exp) != 0)
6800 return CALL_EXPR_RTL (exp);
6802 return expand_call (exp, target, ignore);
6804 case NON_LVALUE_EXPR:
6805 case NOP_EXPR:
6806 case CONVERT_EXPR:
6807 case REFERENCE_EXPR:
6808 if (TREE_CODE (type) == UNION_TYPE)
6810 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6811 if (target == 0)
6813 if (mode != BLKmode)
6814 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6815 else
6816 target = assign_temp (type, 0, 1, 1);
6819 if (GET_CODE (target) == MEM)
6820 /* Store data into beginning of memory target. */
6821 store_expr (TREE_OPERAND (exp, 0),
6822 change_address (target, TYPE_MODE (valtype), 0), 0);
6824 else if (GET_CODE (target) == REG)
6825 /* Store this field into a union of the proper type. */
6826 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6827 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6828 VOIDmode, 0, 1,
6829 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6831 else
6832 abort ();
6834 /* Return the entire union. */
6835 return target;
6838 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6840 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6841 ro_modifier);
6843 /* If the signedness of the conversion differs and OP0 is
6844 a promoted SUBREG, clear that indication since we now
6845 have to do the proper extension. */
6846 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6847 && GET_CODE (op0) == SUBREG)
6848 SUBREG_PROMOTED_VAR_P (op0) = 0;
6850 return op0;
6853 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6854 if (GET_MODE (op0) == mode)
6855 return op0;
6857 /* If OP0 is a constant, just convert it into the proper mode. */
6858 if (CONSTANT_P (op0))
6859 return
6860 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6861 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6863 if (modifier == EXPAND_INITIALIZER)
6864 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6866 if (target == 0)
6867 return
6868 convert_to_mode (mode, op0,
6869 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6870 else
6871 convert_move (target, op0,
6872 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6873 return target;
6875 case PLUS_EXPR:
6876 /* We come here from MINUS_EXPR when the second operand is a
6877 constant. */
6878 plus_expr:
6879 this_optab = add_optab;
6881 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6882 something else, make sure we add the register to the constant and
6883 then to the other thing. This case can occur during strength
6884 reduction and doing it this way will produce better code if the
6885 frame pointer or argument pointer is eliminated.
6887 fold-const.c will ensure that the constant is always in the inner
6888 PLUS_EXPR, so the only case we need to do anything about is if
6889 sp, ap, or fp is our second argument, in which case we must swap
6890 the innermost first argument and our second argument. */
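/* Hypothetical illustration of the swap below: an expression of the form

       (PLUS_EXPR (PLUS_EXPR x 4) fp)

   where FP is an RTL_EXPR for the frame pointer is rewritten as

       (PLUS_EXPR (PLUS_EXPR fp 4) x)

   so the register is added to the constant first.  */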
6892 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6893 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6894 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6895 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6896 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6897 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6899 tree t = TREE_OPERAND (exp, 1);
6901 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6902 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6905 /* If the result is to be ptr_mode and we are adding an integer to
6906 something, we might be forming a constant. So try to use
6907 plus_constant. If it produces a sum and we can't accept it,
6908 use force_operand. This allows P = &ARR[const] to generate
6909 efficient code on machines where a SYMBOL_REF is not a valid
6910 address.
6912 If this is an EXPAND_SUM call, always return the sum. */
6913 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6914 || mode == ptr_mode)
6916 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6917 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6918 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6920 rtx constant_part;
6922 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6923 EXPAND_SUM);
6924 /* Use immed_double_const to ensure that the constant is
6925 truncated according to the mode of OP1, then sign extended
6926 to a HOST_WIDE_INT. Using the constant directly can result
6927 in non-canonical RTL in a 64x32 cross compile. */
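/* Assumed example of the problem being avoided: on a 64-bit host
   targeting a 32-bit machine, the SImode constant 0xffffffff must become
   the canonical CONST_INT -1.  immed_double_const truncates to the mode
   of the operand and sign-extends to a HOST_WIDE_INT, whereas using the
   raw low word directly could leave a non-canonical 0xffffffff.  */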
6928 constant_part
6929 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
6930 (HOST_WIDE_INT) 0,
6931 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
6932 op1 = plus_constant (op1, INTVAL (constant_part));
6933 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6934 op1 = force_operand (op1, target);
6935 return op1;
6938 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6939 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6940 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6942 rtx constant_part;
6944 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6945 EXPAND_SUM);
6946 if (! CONSTANT_P (op0))
6948 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6949 VOIDmode, modifier);
6950 /* Don't go to both_summands if modifier
6951 says it's not right to return a PLUS. */
6952 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6953 goto binop2;
6954 goto both_summands;
6956 /* Use immed_double_const to ensure that the constant is
6957 truncated according to the mode of OP1, then sign extended
6958 to a HOST_WIDE_INT. Using the constant directly can result
6959 in non-canonical RTL in a 64x32 cross compile. */
6960 constant_part
6961 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
6962 (HOST_WIDE_INT) 0,
6963 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6964 op0 = plus_constant (op0, INTVAL (constant_part));
6965 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6966 op0 = force_operand (op0, target);
6967 return op0;
6971 /* No sense saving up arithmetic to be done
6972 if it's all in the wrong mode to form part of an address.
6973 And force_operand won't know whether to sign-extend or
6974 zero-extend. */
6975 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6976 || mode != ptr_mode)
6977 goto binop;
6979 preexpand_calls (exp);
6980 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6981 subtarget = 0;
6983 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6984 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
6986 both_summands:
6987 /* Make sure any term that's a sum with a constant comes last. */
6988 if (GET_CODE (op0) == PLUS
6989 && CONSTANT_P (XEXP (op0, 1)))
6991 temp = op0;
6992 op0 = op1;
6993 op1 = temp;
6995 /* If adding to a sum including a constant,
6996 associate it to put the constant outside. */
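/* Sketch with assumed operands: if OP0 is (REG 100) and OP1 is
   (PLUS (MULT (REG 101) (CONST_INT 4)) (CONST_INT 8)), the code below
   combines the non-constant parts first and re-associates, so the sum
   comes out as
   (PLUS (PLUS (MULT (REG 101) (CONST_INT 4)) (REG 100)) (CONST_INT 8)),
   keeping the constant term outermost.  */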
6997 if (GET_CODE (op1) == PLUS
6998 && CONSTANT_P (XEXP (op1, 1)))
7000 rtx constant_term = const0_rtx;
7002 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7003 if (temp != 0)
7004 op0 = temp;
7005 /* Ensure that MULT comes first if there is one. */
7006 else if (GET_CODE (op0) == MULT)
7007 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7008 else
7009 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7011 /* Let's also eliminate constants from op0 if possible. */
7012 op0 = eliminate_constant_term (op0, &constant_term);
7014 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7015 their sum should be a constant. Form it into OP1, since the
7016 result we want will then be OP0 + OP1. */
7018 temp = simplify_binary_operation (PLUS, mode, constant_term,
7019 XEXP (op1, 1));
7020 if (temp != 0)
7021 op1 = temp;
7022 else
7023 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7026 /* Put a constant term last and put a multiplication first. */
7027 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7028 temp = op1, op1 = op0, op0 = temp;
7030 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7031 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7033 case MINUS_EXPR:
7034 /* For initializers, we are allowed to return a MINUS of two
7035 symbolic constants. Here we handle all cases when both operands
7036 are constant. */
7037 /* Handle difference of two symbolic constants,
7038 for the sake of an initializer. */
7039 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7040 && really_constant_p (TREE_OPERAND (exp, 0))
7041 && really_constant_p (TREE_OPERAND (exp, 1)))
7043 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7044 VOIDmode, ro_modifier);
7045 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7046 VOIDmode, ro_modifier);
7048 /* If the last operand is a CONST_INT, use plus_constant of
7049 the negated constant. Else make the MINUS. */
7050 if (GET_CODE (op1) == CONST_INT)
7051 return plus_constant (op0, - INTVAL (op1));
7052 else
7053 return gen_rtx_MINUS (mode, op0, op1);
7055 /* Convert A - const to A + (-const). */
7056 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7058 tree negated = fold (build1 (NEGATE_EXPR, type,
7059 TREE_OPERAND (exp, 1)));
7061 /* Deal with the case where we can't negate the constant
7062 in TYPE. */
7063 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7065 tree newtype = signed_type (type);
7066 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7067 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7068 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7070 if (! TREE_OVERFLOW (newneg))
7071 return expand_expr (convert (type,
7072 build (PLUS_EXPR, newtype,
7073 newop0, newneg)),
7074 target, tmode, ro_modifier);
7076 else
7078 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7079 goto plus_expr;
7082 this_optab = sub_optab;
7083 goto binop;
7085 case MULT_EXPR:
7086 preexpand_calls (exp);
7087 /* If first operand is constant, swap them.
7088 Thus the following special case checks need only
7089 check the second operand. */
7090 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7092 register tree t1 = TREE_OPERAND (exp, 0);
7093 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7094 TREE_OPERAND (exp, 1) = t1;
7097 /* Attempt to return something suitable for generating an
7098 indexed address, for machines that support that. */
7100 if (modifier == EXPAND_SUM && mode == ptr_mode
7101 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7102 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7104 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7105 EXPAND_SUM);
7107 /* Apply distributive law if OP0 is x+c. */
7108 if (GET_CODE (op0) == PLUS
7109 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7110 return
7111 gen_rtx_PLUS
7112 (mode,
7113 gen_rtx_MULT
7114 (mode, XEXP (op0, 0),
7115 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7116 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7117 * INTVAL (XEXP (op0, 1))));
7119 if (GET_CODE (op0) != REG)
7120 op0 = force_operand (op0, NULL_RTX);
7121 if (GET_CODE (op0) != REG)
7122 op0 = copy_to_mode_reg (mode, op0);
7124 return
7125 gen_rtx_MULT (mode, op0,
7126 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7129 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7130 subtarget = 0;
7132 /* Check for multiplying things that have been extended
7133 from a narrower type. If this machine supports multiplying
7134 in that narrower type with a result in the desired type,
7135 do it that way, and avoid the explicit type-conversion. */
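/* Hypothetical example of the case handled below: for

       (int) (short) a * (int) (short) b

   on a machine providing a HImode-to-SImode multiply pattern such as
   mulhisi3, the HImode operands are multiplied directly through
   smul_widen_optab to give the SImode product, instead of extending both
   operands to SImode first.  The names `a' and `b' are assumptions.  */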
7136 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7137 && TREE_CODE (type) == INTEGER_TYPE
7138 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7139 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7140 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7141 && int_fits_type_p (TREE_OPERAND (exp, 1),
7142 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7143 /* Don't use a widening multiply if a shift will do. */
7144 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7145 > HOST_BITS_PER_WIDE_INT)
7146 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7148 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7149 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7151 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7152 /* If both operands are extended, they must either both
7153 be zero-extended or both be sign-extended. */
7154 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7156 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7158 enum machine_mode innermode
7159 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7160 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7161 ? smul_widen_optab : umul_widen_optab);
7162 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7163 ? umul_widen_optab : smul_widen_optab);
7164 if (mode == GET_MODE_WIDER_MODE (innermode))
7166 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7168 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7169 NULL_RTX, VOIDmode, 0);
7170 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7171 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7172 VOIDmode, 0);
7173 else
7174 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7175 NULL_RTX, VOIDmode, 0);
7176 goto binop2;
7178 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7179 && innermode == word_mode)
7181 rtx htem;
7182 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7183 NULL_RTX, VOIDmode, 0);
7184 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7185 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7186 VOIDmode, 0);
7187 else
7188 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7189 NULL_RTX, VOIDmode, 0);
7190 temp = expand_binop (mode, other_optab, op0, op1, target,
7191 unsignedp, OPTAB_LIB_WIDEN);
7192 htem = expand_mult_highpart_adjust (innermode,
7193 gen_highpart (innermode, temp),
7194 op0, op1,
7195 gen_highpart (innermode, temp),
7196 unsignedp);
7197 emit_move_insn (gen_highpart (innermode, temp), htem);
7198 return temp;
7202 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7203 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7204 return expand_mult (mode, op0, op1, target, unsignedp);
7206 case TRUNC_DIV_EXPR:
7207 case FLOOR_DIV_EXPR:
7208 case CEIL_DIV_EXPR:
7209 case ROUND_DIV_EXPR:
7210 case EXACT_DIV_EXPR:
7211 preexpand_calls (exp);
7212 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7213 subtarget = 0;
7214 /* Possible optimization: compute the dividend with EXPAND_SUM
7215 then, if the divisor is constant, we can optimize the case
7216 where some terms of the dividend have coefficients divisible by it. */
7217 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7218 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7219 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7221 case RDIV_EXPR:
7222 this_optab = flodiv_optab;
7223 goto binop;
7225 case TRUNC_MOD_EXPR:
7226 case FLOOR_MOD_EXPR:
7227 case CEIL_MOD_EXPR:
7228 case ROUND_MOD_EXPR:
7229 preexpand_calls (exp);
7230 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7231 subtarget = 0;
7232 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7233 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7234 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7236 case FIX_ROUND_EXPR:
7237 case FIX_FLOOR_EXPR:
7238 case FIX_CEIL_EXPR:
7239 abort (); /* Not used for C. */
7241 case FIX_TRUNC_EXPR:
7242 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7243 if (target == 0)
7244 target = gen_reg_rtx (mode);
7245 expand_fix (target, op0, unsignedp);
7246 return target;
7248 case FLOAT_EXPR:
7249 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7250 if (target == 0)
7251 target = gen_reg_rtx (mode);
7252 /* expand_float can't figure out what to do if FROM has VOIDmode.
7253 So give it the correct mode. With -O, cse will optimize this. */
7254 if (GET_MODE (op0) == VOIDmode)
7255 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7256 op0);
7257 expand_float (target, op0,
7258 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7259 return target;
7261 case NEGATE_EXPR:
7262 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7263 temp = expand_unop (mode, neg_optab, op0, target, 0);
7264 if (temp == 0)
7265 abort ();
7266 return temp;
7268 case ABS_EXPR:
7269 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7271 /* Handle complex values specially. */
7272 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7273 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7274 return expand_complex_abs (mode, op0, target, unsignedp);
7276 /* Unsigned abs is simply the operand. Testing here means we don't
7277 risk generating incorrect code below. */
7278 if (TREE_UNSIGNED (type))
7279 return op0;
7281 return expand_abs (mode, op0, target,
7282 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7284 case MAX_EXPR:
7285 case MIN_EXPR:
7286 target = original_target;
7287 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7288 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7289 || GET_MODE (target) != mode
7290 || (GET_CODE (target) == REG
7291 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7292 target = gen_reg_rtx (mode);
7293 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7294 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7296 /* First try to do it with a special MIN or MAX instruction.
7297 If that does not win, use a conditional jump to select the proper
7298 value. */
7299 this_optab = (TREE_UNSIGNED (type)
7300 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7301 : (code == MIN_EXPR ? smin_optab : smax_optab));
7303 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7304 OPTAB_WIDEN);
7305 if (temp != 0)
7306 return temp;
7308 /* At this point, a MEM target is no longer useful; we will get better
7309 code without it. */
7311 if (GET_CODE (target) == MEM)
7312 target = gen_reg_rtx (mode);
7314 if (target != op0)
7315 emit_move_insn (target, op0);
7317 op0 = gen_label_rtx ();
7319 /* If this mode is an integer too wide to compare properly,
7320 compare word by word. Rely on cse to optimize constant cases. */
7321 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode))
7323 if (code == MAX_EXPR)
7324 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7325 target, op1, NULL_RTX, op0);
7326 else
7327 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7328 op1, target, NULL_RTX, op0);
7330 else
7332 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7333 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7334 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7335 op0);
7337 emit_move_insn (target, op1);
7338 emit_label (op0);
7339 return target;
7341 case BIT_NOT_EXPR:
7342 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7343 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7344 if (temp == 0)
7345 abort ();
7346 return temp;
7348 case FFS_EXPR:
7349 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7350 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7351 if (temp == 0)
7352 abort ();
7353 return temp;
7355 /* ??? Can optimize bitwise operations with one arg constant.
7356 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7357 and (a bitwise1 b) bitwise2 b (etc)
7358 but that is probably not worthwhile. */
7360 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7361 boolean values when we want in all cases to compute both of them. In
7362 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7363 as actual zero-or-1 values and then bitwise anding. In cases where
7364 there cannot be any side effects, better code would be made by
7365 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7366 how to recognize those cases. */
7368 case TRUTH_AND_EXPR:
7369 case BIT_AND_EXPR:
7370 this_optab = and_optab;
7371 goto binop;
7373 case TRUTH_OR_EXPR:
7374 case BIT_IOR_EXPR:
7375 this_optab = ior_optab;
7376 goto binop;
7378 case TRUTH_XOR_EXPR:
7379 case BIT_XOR_EXPR:
7380 this_optab = xor_optab;
7381 goto binop;
7383 case LSHIFT_EXPR:
7384 case RSHIFT_EXPR:
7385 case LROTATE_EXPR:
7386 case RROTATE_EXPR:
7387 preexpand_calls (exp);
7388 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7389 subtarget = 0;
7390 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7391 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7392 unsignedp);
7394 /* Could determine the answer when only additive constants differ. Also,
7395 the addition of one can be handled by changing the condition. */
7396 case LT_EXPR:
7397 case LE_EXPR:
7398 case GT_EXPR:
7399 case GE_EXPR:
7400 case EQ_EXPR:
7401 case NE_EXPR:
7402 preexpand_calls (exp);
7403 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7404 if (temp != 0)
7405 return temp;
7407 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7408 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7409 && original_target
7410 && GET_CODE (original_target) == REG
7411 && (GET_MODE (original_target)
7412 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7414 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7415 VOIDmode, 0);
7417 if (temp != original_target)
7418 temp = copy_to_reg (temp);
7420 op1 = gen_label_rtx ();
7421 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7422 GET_MODE (temp), unsignedp, 0, op1);
7423 emit_move_insn (temp, const1_rtx);
7424 emit_label (op1);
7425 return temp;
7428 /* If no set-flag instruction, must generate a conditional
7429 store into a temporary variable. Drop through
7430 and handle this like && and ||. */
7432 case TRUTH_ANDIF_EXPR:
7433 case TRUTH_ORIF_EXPR:
7434 if (! ignore
7435 && (target == 0 || ! safe_from_p (target, exp, 1)
7436 /* Make sure we don't have a hard reg (such as function's return
7437 value) live across basic blocks, if not optimizing. */
7438 || (!optimize && GET_CODE (target) == REG
7439 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7440 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7442 if (target)
7443 emit_clr_insn (target);
7445 op1 = gen_label_rtx ();
7446 jumpifnot (exp, op1);
7448 if (target)
7449 emit_0_to_1_insn (target);
7451 emit_label (op1);
7452 return ignore ? const0_rtx : target;
7454 case TRUTH_NOT_EXPR:
7455 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7456 /* The parser is careful to generate TRUTH_NOT_EXPR
7457 only with operands that are always zero or one. */
7458 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7459 target, 1, OPTAB_LIB_WIDEN);
7460 if (temp == 0)
7461 abort ();
7462 return temp;
7464 case COMPOUND_EXPR:
7465 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7466 emit_queue ();
7467 return expand_expr (TREE_OPERAND (exp, 1),
7468 (ignore ? const0_rtx : target),
7469 VOIDmode, 0);
7471 case COND_EXPR:
7472 /* If we would have a "singleton" (see below) were it not for a
7473 conversion in each arm, bring that conversion back out. */
7474 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7475 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7476 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7477 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7479 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7480 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7482 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7483 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7484 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7485 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7486 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7487 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7488 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7489 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7490 return expand_expr (build1 (NOP_EXPR, type,
7491 build (COND_EXPR, TREE_TYPE (true),
7492 TREE_OPERAND (exp, 0),
7493 true, false)),
7494 target, tmode, modifier);
7498 /* Note that COND_EXPRs whose type is a structure or union
7499 are required to be constructed to contain assignments of
7500 a temporary variable, so that we can evaluate them here
7501 for side effect only. If type is void, we must do likewise. */
7503 /* If an arm of the branch requires a cleanup,
7504 only that cleanup is performed. */
7506 tree singleton = 0;
7507 tree binary_op = 0, unary_op = 0;
7509 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7510 convert it to our mode, if necessary. */
7511 if (integer_onep (TREE_OPERAND (exp, 1))
7512 && integer_zerop (TREE_OPERAND (exp, 2))
7513 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7515 if (ignore)
7517 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7518 ro_modifier);
7519 return const0_rtx;
7522 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7523 if (GET_MODE (op0) == mode)
7524 return op0;
7526 if (target == 0)
7527 target = gen_reg_rtx (mode);
7528 convert_move (target, op0, unsignedp);
7529 return target;
7532 /* Check for X ? A + B : A. If we have this, we can copy A to the
7533 output and conditionally add B. Similarly for unary operations.
7534 Don't do this if X has side-effects because those side effects
7535 might affect A or B and the "?" operation is a sequence point in
7536 ANSI. (operand_equal_p tests for side effects.) */
7538 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7539 && operand_equal_p (TREE_OPERAND (exp, 2),
7540 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7541 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7542 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7543 && operand_equal_p (TREE_OPERAND (exp, 1),
7544 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7545 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7546 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7547 && operand_equal_p (TREE_OPERAND (exp, 2),
7548 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7549 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7550 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7551 && operand_equal_p (TREE_OPERAND (exp, 1),
7552 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7553 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7555 /* If we are not to produce a result, we have no target. Otherwise,
7556 if a target was specified use it; it will not be used as an
7557 intermediate target unless it is safe. If no target, use a
7558 temporary. */
7560 if (ignore)
7561 temp = 0;
7562 else if (original_target
7563 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7564 || (singleton && GET_CODE (original_target) == REG
7565 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7566 && original_target == var_rtx (singleton)))
7567 && GET_MODE (original_target) == mode
7568 #ifdef HAVE_conditional_move
7569 && (! can_conditionally_move_p (mode)
7570 || GET_CODE (original_target) == REG
7571 || TREE_ADDRESSABLE (type))
7572 #endif
7573 && ! (GET_CODE (original_target) == MEM
7574 && MEM_VOLATILE_P (original_target)))
7575 temp = original_target;
7576 else if (TREE_ADDRESSABLE (type))
7577 abort ();
7578 else
7579 temp = assign_temp (type, 0, 0, 1);
7581 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7582 do the test of X as a store-flag operation, do this as
7583 A + ((X != 0) << log C). Similarly for other simple binary
7584 operators. Only do for C == 1 if BRANCH_COST is low. */
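/* For example, (x > y ? a + 4 : a) can then become a + ((x > y) << 2),
   avoiding a branch entirely.  */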
7585 if (temp && singleton && binary_op
7586 && (TREE_CODE (binary_op) == PLUS_EXPR
7587 || TREE_CODE (binary_op) == MINUS_EXPR
7588 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7589 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7590 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7591 : integer_onep (TREE_OPERAND (binary_op, 1)))
7592 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7594 rtx result;
7595 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7596 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7597 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7598 : xor_optab);
7600 /* If we had X ? A : A + 1, do this as A + (X == 0).
7602 We have to invert the truth value here and then put it
7603 back later if do_store_flag fails. We cannot simply copy
7604 TREE_OPERAND (exp, 0) to another variable and modify that
7605 because invert_truthvalue can modify the tree pointed to
7606 by its argument. */
7607 if (singleton == TREE_OPERAND (exp, 1))
7608 TREE_OPERAND (exp, 0)
7609 = invert_truthvalue (TREE_OPERAND (exp, 0));
7611 result = do_store_flag (TREE_OPERAND (exp, 0),
7612 (safe_from_p (temp, singleton, 1)
7613 ? temp : NULL_RTX),
7614 mode, BRANCH_COST <= 1);
7616 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7617 result = expand_shift (LSHIFT_EXPR, mode, result,
7618 build_int_2 (tree_log2
7619 (TREE_OPERAND
7620 (binary_op, 1)),
7622 (safe_from_p (temp, singleton, 1)
7623 ? temp : NULL_RTX), 0);
7625 if (result)
7627 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7628 return expand_binop (mode, boptab, op1, result, temp,
7629 unsignedp, OPTAB_LIB_WIDEN);
7631 else if (singleton == TREE_OPERAND (exp, 1))
7632 TREE_OPERAND (exp, 0)
7633 = invert_truthvalue (TREE_OPERAND (exp, 0));
7636 do_pending_stack_adjust ();
7637 NO_DEFER_POP;
7638 op0 = gen_label_rtx ();
7640 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7642 if (temp != 0)
7644 /* If the target conflicts with the other operand of the
7645 binary op, we can't use it. Also, we can't use the target
7646 if it is a hard register, because evaluating the condition
7647 might clobber it. */
7648 if ((binary_op
7649 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7650 || (GET_CODE (temp) == REG
7651 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7652 temp = gen_reg_rtx (mode);
7653 store_expr (singleton, temp, 0);
7655 else
7656 expand_expr (singleton,
7657 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7658 if (singleton == TREE_OPERAND (exp, 1))
7659 jumpif (TREE_OPERAND (exp, 0), op0);
7660 else
7661 jumpifnot (TREE_OPERAND (exp, 0), op0);
7663 start_cleanup_deferral ();
7664 if (binary_op && temp == 0)
7665 /* Just touch the other operand. */
7666 expand_expr (TREE_OPERAND (binary_op, 1),
7667 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7668 else if (binary_op)
7669 store_expr (build (TREE_CODE (binary_op), type,
7670 make_tree (type, temp),
7671 TREE_OPERAND (binary_op, 1)),
7672 temp, 0);
7673 else
7674 store_expr (build1 (TREE_CODE (unary_op), type,
7675 make_tree (type, temp)),
7676 temp, 0);
7677 op1 = op0;
7679 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7680 comparison operator. If we have one of these cases, set the
7681 output to A, branch on A (cse will merge these two references),
7682 then set the output to FOO. */
7683 else if (temp
7684 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7685 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7686 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7687 TREE_OPERAND (exp, 1), 0)
7688 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7689 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7690 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7692 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7693 temp = gen_reg_rtx (mode);
7694 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7695 jumpif (TREE_OPERAND (exp, 0), op0);
7697 start_cleanup_deferral ();
7698 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7699 op1 = op0;
7701 else if (temp
7702 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7703 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7704 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7705 TREE_OPERAND (exp, 2), 0)
7706 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7707 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7708 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7710 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7711 temp = gen_reg_rtx (mode);
7712 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7713 jumpifnot (TREE_OPERAND (exp, 0), op0);
7715 start_cleanup_deferral ();
7716 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7717 op1 = op0;
7719 else
7721 op1 = gen_label_rtx ();
7722 jumpifnot (TREE_OPERAND (exp, 0), op0);
7724 start_cleanup_deferral ();
7726 /* One branch of the cond can be void, if it never returns. For
7727 example A ? throw : E */
7728 if (temp != 0
7729 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7730 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7731 else
7732 expand_expr (TREE_OPERAND (exp, 1),
7733 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7734 end_cleanup_deferral ();
7735 emit_queue ();
7736 emit_jump_insn (gen_jump (op1));
7737 emit_barrier ();
7738 emit_label (op0);
7739 start_cleanup_deferral ();
7740 if (temp != 0
7741 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7742 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7743 else
7744 expand_expr (TREE_OPERAND (exp, 2),
7745 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7748 end_cleanup_deferral ();
7750 emit_queue ();
7751 emit_label (op1);
7752 OK_DEFER_POP;
7754 return temp;
7757 case TARGET_EXPR:
7759 /* Something needs to be initialized, but we didn't know
7760 where that thing was when building the tree. For example,
7761 it could be the return value of a function, or a parameter
7762 to a function which is laid out on the stack, or a temporary
7763 variable which must be passed by reference.
7765 We guarantee that the expression will either be constructed
7766 or copied into our original target. */
7768 tree slot = TREE_OPERAND (exp, 0);
7769 tree cleanups = NULL_TREE;
7770 tree exp1;
7772 if (TREE_CODE (slot) != VAR_DECL)
7773 abort ();
7775 if (! ignore)
7776 target = original_target;
7778 if (target == 0)
7780 if (DECL_RTL (slot) != 0)
7782 target = DECL_RTL (slot);
7783 /* If we have already expanded the slot, don't do
7784 it again. (mrs) */
7785 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7786 return target;
7788 else
7790 target = assign_temp (type, 2, 0, 1);
7791 /* All temp slots at this level must not conflict. */
7792 preserve_temp_slots (target);
7793 DECL_RTL (slot) = target;
7794 if (TREE_ADDRESSABLE (slot))
7796 TREE_ADDRESSABLE (slot) = 0;
7797 mark_addressable (slot);
7800 /* Since SLOT is not known to the called function
7801 to belong to its stack frame, we must build an explicit
7802 cleanup. This case occurs when we must build up a reference
7803 to pass the reference as an argument. In this case,
7804 it is very likely that such a reference need not be
7805 built here. */
7807 if (TREE_OPERAND (exp, 2) == 0)
7808 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7809 cleanups = TREE_OPERAND (exp, 2);
7812 else
7814 /* This case does occur when expanding a parameter which
7815 needs to be constructed on the stack. The target
7816 is the actual stack address that we want to initialize.
7817 The function we call will perform the cleanup in this case. */
7819 /* If we have already assigned it space, use that space,
7820 not the target we were passed in, as our target
7821 parameter is only a hint. */
7822 if (DECL_RTL (slot) != 0)
7824 target = DECL_RTL (slot);
7825 /* If we have already expanded the slot, don't do
7826 it again. (mrs) */
7827 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7828 return target;
7830 else
7832 DECL_RTL (slot) = target;
7833 /* If we must have an addressable slot, then make sure that
7834 the RTL that we just stored in slot is OK. */
7835 if (TREE_ADDRESSABLE (slot))
7837 TREE_ADDRESSABLE (slot) = 0;
7838 mark_addressable (slot);
7843 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7844 /* Mark it as expanded. */
7845 TREE_OPERAND (exp, 1) = NULL_TREE;
7847 TREE_USED (slot) = 1;
7848 store_expr (exp1, target, 0);
7850 expand_decl_cleanup (NULL_TREE, cleanups);
7852 return target;
7855 case INIT_EXPR:
7857 tree lhs = TREE_OPERAND (exp, 0);
7858 tree rhs = TREE_OPERAND (exp, 1);
7859 tree noncopied_parts = 0;
7860 tree lhs_type = TREE_TYPE (lhs);
7862 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7863 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7864 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7865 TYPE_NONCOPIED_PARTS (lhs_type));
7866 while (noncopied_parts != 0)
7868 expand_assignment (TREE_VALUE (noncopied_parts),
7869 TREE_PURPOSE (noncopied_parts), 0, 0);
7870 noncopied_parts = TREE_CHAIN (noncopied_parts);
7872 return temp;
7875 case MODIFY_EXPR:
7877 /* If lhs is complex, expand calls in rhs before computing it.
7878 That's so we don't compute a pointer and save it over a call.
7879 If lhs is simple, compute it first so we can give it as a
7880 target if the rhs is just a call. This avoids an extra temp and copy
7881 and that prevents a partial-subsumption which makes bad code.
7882 Actually we could treat component_ref's of vars like vars. */
7884 tree lhs = TREE_OPERAND (exp, 0);
7885 tree rhs = TREE_OPERAND (exp, 1);
7886 tree noncopied_parts = 0;
7887 tree lhs_type = TREE_TYPE (lhs);
7889 temp = 0;
7891 if (TREE_CODE (lhs) != VAR_DECL
7892 && TREE_CODE (lhs) != RESULT_DECL
7893 && TREE_CODE (lhs) != PARM_DECL
7894 && ! (TREE_CODE (lhs) == INDIRECT_REF
7895 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7896 preexpand_calls (exp);
7898 /* Check for |= or &= of a bitfield of size one into another bitfield
7899 of size 1. In this case, (unless we need the result of the
7900 assignment) we can do this more efficiently with a
7901 test followed by an assignment, if necessary.
7903 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7904 things change so we do, this code should be enhanced to
7905 support it. */
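/* For example, with one-bit fields b and c whose value is not needed,
   `x.b |= y.c' becomes `if (y.c) x.b = 1;' and
   `x.b &= y.c' becomes `if (! y.c) x.b = 0;'.  */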
7906 if (ignore
7907 && TREE_CODE (lhs) == COMPONENT_REF
7908 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7909 || TREE_CODE (rhs) == BIT_AND_EXPR)
7910 && TREE_OPERAND (rhs, 0) == lhs
7911 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7912 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7913 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7915 rtx label = gen_label_rtx ();
7917 do_jump (TREE_OPERAND (rhs, 1),
7918 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7919 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7920 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7921 (TREE_CODE (rhs) == BIT_IOR_EXPR
7922 ? integer_one_node
7923 : integer_zero_node)),
7924 0, 0);
7925 do_pending_stack_adjust ();
7926 emit_label (label);
7927 return const0_rtx;
7930 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7931 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7932 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7933 TYPE_NONCOPIED_PARTS (lhs_type));
7935 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7936 while (noncopied_parts != 0)
7938 expand_assignment (TREE_PURPOSE (noncopied_parts),
7939 TREE_VALUE (noncopied_parts), 0, 0);
7940 noncopied_parts = TREE_CHAIN (noncopied_parts);
7942 return temp;
7945 case RETURN_EXPR:
7946 if (!TREE_OPERAND (exp, 0))
7947 expand_null_return ();
7948 else
7949 expand_return (TREE_OPERAND (exp, 0));
7950 return const0_rtx;
7952 case PREINCREMENT_EXPR:
7953 case PREDECREMENT_EXPR:
7954 return expand_increment (exp, 0, ignore);
7956 case POSTINCREMENT_EXPR:
7957 case POSTDECREMENT_EXPR:
7958 /* Faster to treat as pre-increment if result is not used. */
7959 return expand_increment (exp, ! ignore, ignore);
7961 case ADDR_EXPR:
7962 /* If nonzero, TEMP will be set to the address of something that might
7963 be a MEM corresponding to a stack slot. */
7964 temp = 0;
7966 /* Are we taking the address of a nested function? */
7967 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7968 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7969 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7970 && ! TREE_STATIC (exp))
7972 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7973 op0 = force_operand (op0, target);
7975 /* If we are taking the address of something erroneous, just
7976 return a zero. */
7977 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7978 return const0_rtx;
7979 else
7981 /* We make sure to pass const0_rtx down if we came in with
7982 ignore set, to avoid doing the cleanups twice for something. */
7983 op0 = expand_expr (TREE_OPERAND (exp, 0),
7984 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7985 (modifier == EXPAND_INITIALIZER
7986 ? modifier : EXPAND_CONST_ADDRESS));
7988 /* If we are going to ignore the result, OP0 will have been set
7989 to const0_rtx, so just return it. Don't get confused and
7990 think we are taking the address of the constant. */
7991 if (ignore)
7992 return op0;
7994 op0 = protect_from_queue (op0, 0);
7996 /* We would like the object in memory. If it is a constant, we can
7997 have it be statically allocated into memory. For a non-constant,
7998 we need to allocate some memory and store the value into it. */
8000 if (CONSTANT_P (op0))
8001 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8002 op0);
8003 else if (GET_CODE (op0) == MEM)
8005 mark_temp_addr_taken (op0);
8006 temp = XEXP (op0, 0);
8009 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8010 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8012 /* If this object is in a register, it must not
8013 be BLKmode. */
8014 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8015 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8017 mark_temp_addr_taken (memloc);
8018 emit_move_insn (memloc, op0);
8019 op0 = memloc;
8022 if (GET_CODE (op0) != MEM)
8023 abort ();
8025 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8027 temp = XEXP (op0, 0);
8028 #ifdef POINTERS_EXTEND_UNSIGNED
8029 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8030 && mode == ptr_mode)
8031 temp = convert_memory_address (ptr_mode, temp);
8032 #endif
8033 return temp;
8036 op0 = force_operand (XEXP (op0, 0), target);
8039 if (flag_force_addr && GET_CODE (op0) != REG)
8040 op0 = force_reg (Pmode, op0);
8042 if (GET_CODE (op0) == REG
8043 && ! REG_USERVAR_P (op0))
8044 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8046 /* If we might have had a temp slot, add an equivalent address
8047 for it. */
8048 if (temp != 0)
8049 update_temp_slot_address (temp, op0);
8051 #ifdef POINTERS_EXTEND_UNSIGNED
8052 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8053 && mode == ptr_mode)
8054 op0 = convert_memory_address (ptr_mode, op0);
8055 #endif
8057 return op0;
8059 case ENTRY_VALUE_EXPR:
8060 abort ();
8062 /* COMPLEX type for Extended Pascal & Fortran */
8063 case COMPLEX_EXPR:
8065 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8066 rtx insns;
8068 /* Get the rtx for the operands. */
8069 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8070 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8072 if (! target)
8073 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8075 start_sequence ();
8077 /* Move the real (op0) and imaginary (op1) parts to their location. */
8078 emit_move_insn (gen_realpart (mode, target), op0);
8079 emit_move_insn (gen_imagpart (mode, target), op1);
8081 insns = get_insns ();
8082 end_sequence ();
8084 /* Complex construction should appear as a single unit. */
8085 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8086 each with a separate pseudo as destination.
8087 It's not correct for flow to treat them as a unit. */
8088 if (GET_CODE (target) != CONCAT)
8089 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8090 else
8091 emit_insns (insns);
8093 return target;
8096 case REALPART_EXPR:
8097 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8098 return gen_realpart (mode, op0);
8100 case IMAGPART_EXPR:
8101 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8102 return gen_imagpart (mode, op0);
8104 case CONJ_EXPR:
8106 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8107 rtx imag_t;
8108 rtx insns;
8110 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8112 if (! target)
8113 target = gen_reg_rtx (mode);
8115 start_sequence ();
8117 /* Store the realpart and the negated imagpart to target. */
8118 emit_move_insn (gen_realpart (partmode, target),
8119 gen_realpart (partmode, op0));
8121 imag_t = gen_imagpart (partmode, target);
8122 temp = expand_unop (partmode, neg_optab,
8123 gen_imagpart (partmode, op0), imag_t, 0);
8124 if (temp != imag_t)
8125 emit_move_insn (imag_t, temp);
8127 insns = get_insns ();
8128 end_sequence ();
8130 /* Conjugate should appear as a single unit.
8131 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8132 each with a separate pseudo as destination.
8133 It's not correct for flow to treat them as a unit. */
8134 if (GET_CODE (target) != CONCAT)
8135 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8136 else
8137 emit_insns (insns);
8139 return target;
8142 case TRY_CATCH_EXPR:
8144 tree handler = TREE_OPERAND (exp, 1);
8146 expand_eh_region_start ();
8148 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8150 expand_eh_region_end (handler);
8152 return op0;
8155 case TRY_FINALLY_EXPR:
8157 tree try_block = TREE_OPERAND (exp, 0);
8158 tree finally_block = TREE_OPERAND (exp, 1);
8159 rtx finally_label = gen_label_rtx ();
8160 rtx done_label = gen_label_rtx ();
8161 rtx return_link = gen_reg_rtx (Pmode);
8162 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8163 (tree) finally_label, (tree) return_link);
8164 TREE_SIDE_EFFECTS (cleanup) = 1;
8166 /* Start a new binding layer that will keep track of all cleanup
8167 actions to be performed. */
8168 expand_start_bindings (0);
8170 target_temp_slot_level = temp_slot_level;
8172 expand_decl_cleanup (NULL_TREE, cleanup);
8173 op0 = expand_expr (try_block, target, tmode, modifier);
8175 preserve_temp_slots (op0);
8176 expand_end_bindings (NULL_TREE, 0, 0);
8177 emit_jump (done_label);
8178 emit_label (finally_label);
8179 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8180 emit_indirect_jump (return_link);
8181 emit_label (done_label);
8182 return op0;
8185 case GOTO_SUBROUTINE_EXPR:
8187 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8188 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8189 rtx return_address = gen_label_rtx ();
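/* Emit the subroutine linkage by hand: save the address to resume at
   in RETURN_LINK, jump to the subroutine, and lay down the resume
   label right after the jump.  */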
8190 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8191 emit_jump (subr);
8192 emit_label (return_address);
8193 return const0_rtx;
8196 case POPDCC_EXPR:
8198 rtx dcc = get_dynamic_cleanup_chain ();
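/* Pop one element: advance the chain head to the link it points to,
   i.e. dcc = *dcc.  */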
8199 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8200 return const0_rtx;
8203 case POPDHC_EXPR:
8205 rtx dhc = get_dynamic_handler_chain ();
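/* Likewise, pop the dynamic handler chain: dhc = *dhc.  */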
8206 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8207 return const0_rtx;
8210 case VA_ARG_EXPR:
8211 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8213 default:
8214 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8217 /* Here to do an ordinary binary operator, generating an instruction
8218 from the optab already placed in `this_optab'. */
8219 binop:
8220 preexpand_calls (exp);
8221 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8222 subtarget = 0;
8223 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8224 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8225 binop2:
8226 temp = expand_binop (mode, this_optab, op0, op1, target,
8227 unsignedp, OPTAB_LIB_WIDEN);
8228 if (temp == 0)
8229 abort ();
8230 return temp;
8233 /* Return the tree node and offset if a given argument corresponds to
8234 a string constant. */
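/* For example, for the argument `"abc" + 2' we return the STRING_CST
   "abc" and set *PTR_OFFSET to 2; for a plain `"abc"' the offset is
   zero.  */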
8236 tree
8237 string_constant (arg, ptr_offset)
8238 tree arg;
8239 tree *ptr_offset;
8241 STRIP_NOPS (arg);
8243 if (TREE_CODE (arg) == ADDR_EXPR
8244 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8246 *ptr_offset = integer_zero_node;
8247 return TREE_OPERAND (arg, 0);
8249 else if (TREE_CODE (arg) == PLUS_EXPR)
8251 tree arg0 = TREE_OPERAND (arg, 0);
8252 tree arg1 = TREE_OPERAND (arg, 1);
8254 STRIP_NOPS (arg0);
8255 STRIP_NOPS (arg1);
8257 if (TREE_CODE (arg0) == ADDR_EXPR
8258 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8260 *ptr_offset = arg1;
8261 return TREE_OPERAND (arg0, 0);
8263 else if (TREE_CODE (arg1) == ADDR_EXPR
8264 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8266 *ptr_offset = arg0;
8267 return TREE_OPERAND (arg1, 0);
8271 return 0;
8274 /* Expand code for a post- or pre- increment or decrement
8275 and return the RTX for the result.
8276 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
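/* For example, `i++' used for its value is expanded with POST == 1 so
   the old value can be returned; `++i', or an increment whose value is
   ignored, is expanded with POST == 0.  */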
8278 static rtx
8279 expand_increment (exp, post, ignore)
8280 register tree exp;
8281 int post, ignore;
8283 register rtx op0, op1;
8284 register rtx temp, value;
8285 register tree incremented = TREE_OPERAND (exp, 0);
8286 optab this_optab = add_optab;
8287 int icode;
8288 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8289 int op0_is_copy = 0;
8290 int single_insn = 0;
8291 /* 1 means we can't store into OP0 directly,
8292 because it is a subreg narrower than a word,
8293 and we don't dare clobber the rest of the word. */
8294 int bad_subreg = 0;
8296 /* Stabilize any component ref that might need to be
8297 evaluated more than once below. */
8298 if (!post
8299 || TREE_CODE (incremented) == BIT_FIELD_REF
8300 || (TREE_CODE (incremented) == COMPONENT_REF
8301 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8302 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8303 incremented = stabilize_reference (incremented);
8304 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8305 ones into save exprs so that they don't accidentally get evaluated
8306 more than once by the code below. */
8307 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8308 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8309 incremented = save_expr (incremented);
8311 /* Compute the operands as RTX.
8312 Note whether OP0 is the actual lvalue or a copy of it:
8313 I believe it is a copy iff it is a register or subreg
8314 and insns were generated in computing it. */
8316 temp = get_last_insn ();
8317 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8319 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8320 in place but instead must do sign- or zero-extension during assignment,
8321 so we copy it into a new register and let the code below use it as
8322 a copy.
8324 Note that we can safely modify this SUBREG since it is known not to be
8325 shared (it was made by the expand_expr call above). */
8327 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8329 if (post)
8330 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8331 else
8332 bad_subreg = 1;
8334 else if (GET_CODE (op0) == SUBREG
8335 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8337 /* We cannot increment this SUBREG in place. If we are
8338 post-incrementing, get a copy of the old value. Otherwise,
8339 just mark that we cannot increment in place. */
8340 if (post)
8341 op0 = copy_to_reg (op0);
8342 else
8343 bad_subreg = 1;
8346 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8347 && temp != get_last_insn ());
8348 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8349 EXPAND_MEMORY_USE_BAD);
8351 /* Decide whether incrementing or decrementing. */
8352 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8353 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8354 this_optab = sub_optab;
8356 /* Convert decrement by a constant into a negative increment. */
8357 if (this_optab == sub_optab
8358 && GET_CODE (op1) == CONST_INT)
8360 op1 = GEN_INT (- INTVAL (op1));
8361 this_optab = add_optab;
8364 /* For a preincrement, see if we can do this with a single instruction. */
8365 if (!post)
8367 icode = (int) this_optab->handlers[(int) mode].insn_code;
8368 if (icode != (int) CODE_FOR_nothing
8369 /* Make sure that OP0 is valid for operands 0 and 1
8370 of the insn we want to queue. */
8371 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8372 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8373 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8374 single_insn = 1;
8377 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8378 then we cannot just increment OP0. We must therefore contrive to
8379 increment the original value. Then, for postincrement, we can return
8380 OP0 since it is a copy of the old value. For preincrement, expand here
8381 unless we can do it with a single insn.
8383 Likewise if storing directly into OP0 would clobber high bits
8384 we need to preserve (bad_subreg). */
8385 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8387 /* This is the easiest way to increment the value wherever it is.
8388 Problems with multiple evaluation of INCREMENTED are prevented
8389 because either (1) it is a component_ref or preincrement,
8390 in which case it was stabilized above, or (2) it is an array_ref
8391 with constant index in an array in a register, which is
8392 safe to reevaluate. */
8393 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8394 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8395 ? MINUS_EXPR : PLUS_EXPR),
8396 TREE_TYPE (exp),
8397 incremented,
8398 TREE_OPERAND (exp, 1));
8400 while (TREE_CODE (incremented) == NOP_EXPR
8401 || TREE_CODE (incremented) == CONVERT_EXPR)
8403 newexp = convert (TREE_TYPE (incremented), newexp);
8404 incremented = TREE_OPERAND (incremented, 0);
8407 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8408 return post ? op0 : temp;
8411 if (post)
8413 /* We have a true reference to the value in OP0.
8414 If there is an insn to add or subtract in this mode, queue it.
8415 Queueing the increment insn avoids the register shuffling
8416 that often results if we must increment now and first save
8417 the old value for subsequent use. */
8419 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8420 op0 = stabilize (op0);
8421 #endif
8423 icode = (int) this_optab->handlers[(int) mode].insn_code;
8424 if (icode != (int) CODE_FOR_nothing
8425 /* Make sure that OP0 is valid for operands 0 and 1
8426 of the insn we want to queue. */
8427 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8428 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8430 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8431 op1 = force_reg (mode, op1);
8433 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8435 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8437 rtx addr = (general_operand (XEXP (op0, 0), mode)
8438 ? force_reg (Pmode, XEXP (op0, 0))
8439 : copy_to_reg (XEXP (op0, 0)));
8440 rtx temp, result;
8442 op0 = change_address (op0, VOIDmode, addr);
8443 temp = force_reg (GET_MODE (op0), op0);
8444 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8445 op1 = force_reg (mode, op1);
8447 /* The increment queue is LIFO, thus we have to `queue'
8448 the instructions in reverse order. */
8449 enqueue_insn (op0, gen_move_insn (op0, temp));
8450 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8451 return result;
8455 /* Preincrement, or we can't increment with one simple insn. */
8456 if (post)
8457 /* Save a copy of the value before inc or dec, to return it later. */
8458 temp = value = copy_to_reg (op0);
8459 else
8460 /* Arrange to return the incremented value. */
8461 /* Copy the rtx because expand_binop will protect from the queue,
8462 and the results of that would be invalid for us to return
8463 if our caller does emit_queue before using our result. */
8464 temp = copy_rtx (value = op0);
8466 /* Increment however we can. */
8467 op1 = expand_binop (mode, this_optab, value, op1,
8468 current_function_check_memory_usage ? NULL_RTX : op0,
8469 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8470 /* Make sure the value is stored into OP0. */
8471 if (op1 != op0)
8472 emit_move_insn (op0, op1);
8474 return temp;
8477 /* Expand all function calls contained within EXP, innermost ones first.
8478 But don't look within expressions that have sequence points.
8479 For each CALL_EXPR, record the rtx for its value
8480 in the CALL_EXPR_RTL field. */
8482 static void
8483 preexpand_calls (exp)
8484 tree exp;
8486 register int nops, i;
8487 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8489 if (! do_preexpand_calls)
8490 return;
8492 /* Only expressions and references can contain calls. */
8494 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8495 return;
8497 switch (TREE_CODE (exp))
8499 case CALL_EXPR:
8500 /* Do nothing if already expanded. */
8501 if (CALL_EXPR_RTL (exp) != 0
8502 /* Do nothing if the call returns a variable-sized object. */
8503 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
8504 /* Do nothing to built-in functions. */
8505 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
8506 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8507 == FUNCTION_DECL)
8508 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8509 return;
8511 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8512 return;
8514 case COMPOUND_EXPR:
8515 case COND_EXPR:
8516 case TRUTH_ANDIF_EXPR:
8517 case TRUTH_ORIF_EXPR:
8518 /* If we find one of these, then we can be sure
8519 the adjust will be done for it (since it makes jumps).
8520 Do it now, so that if this is inside an argument
8521 of a function, we don't get the stack adjustment
8522 after some other args have already been pushed. */
8523 do_pending_stack_adjust ();
8524 return;
8526 case BLOCK:
8527 case RTL_EXPR:
8528 case WITH_CLEANUP_EXPR:
8529 case CLEANUP_POINT_EXPR:
8530 case TRY_CATCH_EXPR:
8531 return;
8533 case SAVE_EXPR:
8534 if (SAVE_EXPR_RTL (exp) != 0)
8535 return;
8537 default:
8538 break;
8541 nops = tree_code_length[(int) TREE_CODE (exp)];
8542 for (i = 0; i < nops; i++)
8543 if (TREE_OPERAND (exp, i) != 0)
8545 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8546 if (type == 'e' || type == '<' || type == '1' || type == '2'
8547 || type == 'r')
8548 preexpand_calls (TREE_OPERAND (exp, i));
8552 /* At the start of a function, record that we have no previously-pushed
8553 arguments waiting to be popped. */
8555 void
8556 init_pending_stack_adjust ()
8558 pending_stack_adjust = 0;
8561 /* When exiting from a function, if safe, clear out any pending stack adjust
8562 so the adjustment won't get done.
8564 Note, if the current function calls alloca, then it must have a
8565 frame pointer regardless of the value of flag_omit_frame_pointer. */
8567 void
8568 clear_pending_stack_adjust ()
8570 #ifdef EXIT_IGNORE_STACK
8571 if (optimize > 0
8572 && (! flag_omit_frame_pointer || current_function_calls_alloca)
8573 && EXIT_IGNORE_STACK
8574 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8575 && ! flag_inline_functions)
8576 pending_stack_adjust = 0;
8577 #endif
8580 /* Pop any previously-pushed arguments that have not been popped yet. */
8582 void
8583 do_pending_stack_adjust ()
8585 if (inhibit_defer_pop == 0)
8587 if (pending_stack_adjust != 0)
8588 adjust_stack (GEN_INT (pending_stack_adjust));
8589 pending_stack_adjust = 0;
8593 /* Expand conditional expressions. */
8595 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8596 LABEL is an rtx of code CODE_LABEL, in this function and all the
8597 functions here. */
8599 void
8600 jumpifnot (exp, label)
8601 tree exp;
8602 rtx label;
8604 do_jump (exp, label, NULL_RTX);
8607 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8609 void
8610 jumpif (exp, label)
8611 tree exp;
8612 rtx label;
8614 do_jump (exp, NULL_RTX, label);
8617 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8618 the result is zero, or IF_TRUE_LABEL if the result is one.
8619 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8620 meaning fall through in that case.
8622 do_jump always does any pending stack adjust except when it does not
8623 actually perform a jump. An example where there is no jump
8624 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8626 This function is responsible for optimizing cases such as
8627 &&, || and comparison operators in EXP. */
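/* For example, for `a && b' this jumps to IF_FALSE_LABEL as soon as A
   is found to be zero, instead of computing the value of the whole
   conjunction.  */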
8629 void
8630 do_jump (exp, if_false_label, if_true_label)
8631 tree exp;
8632 rtx if_false_label, if_true_label;
8634 register enum tree_code code = TREE_CODE (exp);
8635 /* Some cases need to create a label to jump to
8636 in order to properly fall through.
8637 These cases set DROP_THROUGH_LABEL nonzero. */
8638 rtx drop_through_label = 0;
8639 rtx temp;
8640 int i;
8641 tree type;
8642 enum machine_mode mode;
8644 #ifdef MAX_INTEGER_COMPUTATION_MODE
8645 check_max_integer_computation_mode (exp);
8646 #endif
8648 emit_queue ();
8650 switch (code)
8652 case ERROR_MARK:
8653 break;
8655 case INTEGER_CST:
8656 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8657 if (temp)
8658 emit_jump (temp);
8659 break;
8661 #if 0
8662 /* This is not true with #pragma weak */
8663 case ADDR_EXPR:
8664 /* The address of something can never be zero. */
8665 if (if_true_label)
8666 emit_jump (if_true_label);
8667 break;
8668 #endif
8670 case NOP_EXPR:
8671 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8672 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8673 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8674 goto normal;
8675 case CONVERT_EXPR:
8676 /* If we are narrowing the operand, we have to do the compare in the
8677 narrower mode. */
8678 if ((TYPE_PRECISION (TREE_TYPE (exp))
8679 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8680 goto normal;
8681 case NON_LVALUE_EXPR:
8682 case REFERENCE_EXPR:
8683 case ABS_EXPR:
8684 case NEGATE_EXPR:
8685 case LROTATE_EXPR:
8686 case RROTATE_EXPR:
8687 /* These cannot change zero->non-zero or vice versa. */
8688 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8689 break;
8691 #if 0
8692 /* This is never less insns than evaluating the PLUS_EXPR followed by
8693 a test and can be longer if the test is eliminated. */
8694 case PLUS_EXPR:
8695 /* Reduce to minus. */
8696 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8697 TREE_OPERAND (exp, 0),
8698 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8699 TREE_OPERAND (exp, 1))));
8700 /* Process as MINUS. */
8701 #endif
8703 case MINUS_EXPR:
8704 /* Non-zero iff operands of minus differ. */
8705 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
8706 TREE_OPERAND (exp, 0),
8707 TREE_OPERAND (exp, 1)),
8708 NE, NE, if_false_label, if_true_label);
8709 break;
8711 case BIT_AND_EXPR:
8712 /* If we are AND'ing with a small constant, do this comparison in the
8713 smallest type that fits. If the machine doesn't have comparisons
8714 that small, it will be converted back to the wider comparison.
8715 This helps if we are testing the sign bit of a narrower object.
8716 combine can't do this for us because it can't know whether a
8717 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
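/* For example, `(x & 0x80) != 0' can be tested with a QImode compare
   of the low byte (a sign-bit test) instead of first widening X.  */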
8719 if (! SLOW_BYTE_ACCESS
8720 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8721 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8722 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8723 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8724 && (type = type_for_mode (mode, 1)) != 0
8725 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8726 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8727 != CODE_FOR_nothing))
8729 do_jump (convert (type, exp), if_false_label, if_true_label);
8730 break;
8732 goto normal;
8734 case TRUTH_NOT_EXPR:
8735 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8736 break;
8738 case TRUTH_ANDIF_EXPR:
8739 if (if_false_label == 0)
8740 if_false_label = drop_through_label = gen_label_rtx ();
8741 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8742 start_cleanup_deferral ();
8743 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8744 end_cleanup_deferral ();
8745 break;
8747 case TRUTH_ORIF_EXPR:
8748 if (if_true_label == 0)
8749 if_true_label = drop_through_label = gen_label_rtx ();
8750 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8751 start_cleanup_deferral ();
8752 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8753 end_cleanup_deferral ();
8754 break;
8756 case COMPOUND_EXPR:
8757 push_temp_slots ();
8758 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8759 preserve_temp_slots (NULL_RTX);
8760 free_temp_slots ();
8761 pop_temp_slots ();
8762 emit_queue ();
8763 do_pending_stack_adjust ();
8764 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8765 break;
8767 case COMPONENT_REF:
8768 case BIT_FIELD_REF:
8769 case ARRAY_REF:
8771 int bitsize, bitpos, unsignedp;
8772 enum machine_mode mode;
8773 tree type;
8774 tree offset;
8775 int volatilep = 0;
8776 int alignment;
8778 /* Get description of this reference. We don't actually care
8779 about the underlying object here. */
8780 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8781 &mode, &unsignedp, &volatilep,
8782 &alignment);
8784 type = type_for_size (bitsize, unsignedp);
8785 if (! SLOW_BYTE_ACCESS
8786 && type != 0 && bitsize >= 0
8787 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8788 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8789 != CODE_FOR_nothing))
8791 do_jump (convert (type, exp), if_false_label, if_true_label);
8792 break;
8794 goto normal;
8797 case COND_EXPR:
8798 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8799 if (integer_onep (TREE_OPERAND (exp, 1))
8800 && integer_zerop (TREE_OPERAND (exp, 2)))
8801 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8803 else if (integer_zerop (TREE_OPERAND (exp, 1))
8804 && integer_onep (TREE_OPERAND (exp, 2)))
8805 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8807 else
8809 register rtx label1 = gen_label_rtx ();
8810 drop_through_label = gen_label_rtx ();
8812 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8814 start_cleanup_deferral ();
8815 /* Now the THEN-expression. */
8816 do_jump (TREE_OPERAND (exp, 1),
8817 if_false_label ? if_false_label : drop_through_label,
8818 if_true_label ? if_true_label : drop_through_label);
8819 /* In case the do_jump just above never jumps. */
8820 do_pending_stack_adjust ();
8821 emit_label (label1);
8823 /* Now the ELSE-expression. */
8824 do_jump (TREE_OPERAND (exp, 2),
8825 if_false_label ? if_false_label : drop_through_label,
8826 if_true_label ? if_true_label : drop_through_label);
8827 end_cleanup_deferral ();
8829 break;
8831 case EQ_EXPR:
8833 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8835 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8836 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8838 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8839 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8840 do_jump
8841 (fold
8842 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
8843 fold (build (EQ_EXPR, TREE_TYPE (exp),
8844 fold (build1 (REALPART_EXPR,
8845 TREE_TYPE (inner_type),
8846 exp0)),
8847 fold (build1 (REALPART_EXPR,
8848 TREE_TYPE (inner_type),
8849 exp1)))),
8850 fold (build (EQ_EXPR, TREE_TYPE (exp),
8851 fold (build1 (IMAGPART_EXPR,
8852 TREE_TYPE (inner_type),
8853 exp0)),
8854 fold (build1 (IMAGPART_EXPR,
8855 TREE_TYPE (inner_type),
8856 exp1)))))),
8857 if_false_label, if_true_label);
8860 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8861 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8863 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8864 && !can_compare_p (TYPE_MODE (inner_type)))
8865 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8866 else
8867 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
8868 break;
8871 case NE_EXPR:
8873 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8875 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8876 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8878 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8879 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8880 do_jump
8881 (fold
8882 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
8883 fold (build (NE_EXPR, TREE_TYPE (exp),
8884 fold (build1 (REALPART_EXPR,
8885 TREE_TYPE (inner_type),
8886 exp0)),
8887 fold (build1 (REALPART_EXPR,
8888 TREE_TYPE (inner_type),
8889 exp1)))),
8890 fold (build (NE_EXPR, TREE_TYPE (exp),
8891 fold (build1 (IMAGPART_EXPR,
8892 TREE_TYPE (inner_type),
8893 exp0)),
8894 fold (build1 (IMAGPART_EXPR,
8895 TREE_TYPE (inner_type),
8896 exp1)))))),
8897 if_false_label, if_true_label);
8900 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8901 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8903 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8904 && !can_compare_p (TYPE_MODE (inner_type)))
8905 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8906 else
8907 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
8908 break;
8911 case LT_EXPR:
8912 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8913 == MODE_INT)
8914 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8915 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8916 else
8917 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
8918 break;
8920 case LE_EXPR:
8921 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8922 == MODE_INT)
8923 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8924 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8925 else
8926 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
8927 break;
8929 case GT_EXPR:
8930 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8931 == MODE_INT)
8932 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8933 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8934 else
8935 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
8936 break;
8938 case GE_EXPR:
8939 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8940 == MODE_INT)
8941 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8942 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8943 else
8944 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
8945 break;
8947 default:
8948 normal:
8949 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8950 #if 0
8951 /* This is not needed any more and causes poor code since it causes
8952 comparisons and tests from non-SI objects to have different code
8953 sequences. */
8954 /* Copy to register to avoid generating bad insns by cse
8955 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8956 if (!cse_not_expected && GET_CODE (temp) == MEM)
8957 temp = copy_to_reg (temp);
8958 #endif
8959 do_pending_stack_adjust ();
8960 /* Do any postincrements in the expression that was tested. */
8961 emit_queue ();
8963 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
8965 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
8966 if (target)
8967 emit_jump (target);
8969 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8970 && ! can_compare_p (GET_MODE (temp)))
8971 /* Note swapping the labels gives us not-equal. */
8972 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8973 else if (GET_MODE (temp) != VOIDmode)
8974 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
8975 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8976 GET_MODE (temp), NULL_RTX, 0,
8977 if_false_label, if_true_label);
8978 else
8979 abort ();
8982 if (drop_through_label)
8984 /* If do_jump produces code that might be jumped around,
8985 do any stack adjusts from that code, before the place
8986 where control merges in. */
8987 do_pending_stack_adjust ();
8988 emit_label (drop_through_label);
8992 /* Given a comparison expression EXP for values too wide to be compared
8993 with one insn, test the comparison and jump to the appropriate label.
8994 The code of EXP is ignored; we always test GT if SWAP is 0,
8995 and LT if SWAP is 1. */
8997 static void
8998 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8999 tree exp;
9000 int swap;
9001 rtx if_false_label, if_true_label;
9002 {
9003 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9004 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9005 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9006 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9008 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9009 }
9011 /* Compare OP0 with OP1, word at a time, in mode MODE.
9012 UNSIGNEDP says to do unsigned comparison.
9013 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
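/* In outline, the loop below emits, for each word from the most
   significant down:

       if (op0_word > op1_word)   goto if_true_label;
       if (op0_word != op1_word)  goto if_false_label;

   and finally jumps to if_false_label if every word compared equal.
   The greater-than test on the high-order word uses the caller's
   signedness; all lower words are compared unsigned.  */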
9015 void
9016 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9017 enum machine_mode mode;
9018 int unsignedp;
9019 rtx op0, op1;
9020 rtx if_false_label, if_true_label;
9021 {
9022 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9023 rtx drop_through_label = 0;
9024 int i;
9026 if (! if_true_label || ! if_false_label)
9027 drop_through_label = gen_label_rtx ();
9028 if (! if_true_label)
9029 if_true_label = drop_through_label;
9030 if (! if_false_label)
9031 if_false_label = drop_through_label;
9033 /* Compare a word at a time, high order first. */
9034 for (i = 0; i < nwords; i++)
9035 {
9036 rtx op0_word, op1_word;
9038 if (WORDS_BIG_ENDIAN)
9039 {
9040 op0_word = operand_subword_force (op0, i, mode);
9041 op1_word = operand_subword_force (op1, i, mode);
9042 }
9043 else
9044 {
9045 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9046 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9047 }
9049 /* All but high-order word must be compared as unsigned. */
9050 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9051 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9052 NULL_RTX, if_true_label);
9054 /* Consider lower words only if these are equal. */
9055 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9056 NULL_RTX, 0, NULL_RTX, if_false_label);
9057 }
9059 if (if_false_label)
9060 emit_jump (if_false_label);
9061 if (drop_through_label)
9062 emit_label (drop_through_label);
9063 }
9065 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9066 with one insn, test the comparison and jump to the appropriate label. */
9068 static void
9069 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9070 tree exp;
9071 rtx if_false_label, if_true_label;
9072 {
9073 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9074 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9075 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9076 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9077 int i;
9078 rtx drop_through_label = 0;
9080 if (! if_false_label)
9081 drop_through_label = if_false_label = gen_label_rtx ();
9083 for (i = 0; i < nwords; i++)
9084 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9085 operand_subword_force (op1, i, mode),
9086 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9087 word_mode, NULL_RTX, 0, if_false_label,
9088 NULL_RTX);
9090 if (if_true_label)
9091 emit_jump (if_true_label);
9092 if (drop_through_label)
9093 emit_label (drop_through_label);
9094 }
9096 /* Jump according to whether OP0 is 0.
9097 We assume that OP0 has an integer mode that is too wide
9098 for the available compare insns. */
9100 void
9101 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9102 rtx op0;
9103 rtx if_false_label, if_true_label;
9104 {
9105 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9106 rtx part;
9107 int i;
9108 rtx drop_through_label = 0;
9110 /* The fastest way of doing this comparison on almost any machine is to
9111 "or" all the words and compare the result. If all have to be loaded
9112 from memory and this is a very wide item, it's possible this may
9113 be slower, but that's highly unlikely. */
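/* That is, the code below first tries to compute

       part = word[0] | word[1] | ... | word[nwords-1]

   and test PART against zero with a single word-mode comparison,
   falling back to comparing each word against zero only if the IOR
   cannot be expanded.  */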
9115 part = gen_reg_rtx (word_mode);
9116 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9117 for (i = 1; i < nwords && part != 0; i++)
9118 part = expand_binop (word_mode, ior_optab, part,
9119 operand_subword_force (op0, i, GET_MODE (op0)),
9120 part, 1, OPTAB_WIDEN);
9122 if (part != 0)
9123 {
9124 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9125 NULL_RTX, 0, if_false_label, if_true_label);
9127 return;
9128 }
9130 /* If we couldn't do the "or" simply, do this with a series of compares. */
9131 if (! if_false_label)
9132 drop_through_label = if_false_label = gen_label_rtx ();
9134 for (i = 0; i < nwords; i++)
9135 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9136 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9137 if_false_label, NULL_RTX);
9139 if (if_true_label)
9140 emit_jump (if_true_label);
9142 if (drop_through_label)
9143 emit_label (drop_through_label);
9144 }
9146 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE,
9147 and set (CC0) according to the result.  OP0 and OP1 are rtx's
9148 that must already have been computed.
9149 The decision as to signed or unsigned comparison must be made by the caller.
9151 We force a stack adjustment unless there are currently
9152 things pushed on the stack that aren't yet used.
9154 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9155 compared.
9157 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9158 size of MODE should be used. */
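/* On the normal path the value returned is a CODE rtx comparing
   (cc0) with zero, e.g. (gt (cc0) (const_int 0)), suitable for use
   in a conditional branch; when both operands are constant the
   folded result (const_true_rtx or const0_rtx) is returned
   instead.  */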
9160 rtx
9161 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9162 register rtx op0, op1;
9163 enum rtx_code code;
9164 int unsignedp;
9165 enum machine_mode mode;
9166 rtx size;
9167 int align;
9168 {
9169 rtx tem;
9171 /* If one operand is constant, make it the second one. Only do this
9172 if the other operand is not constant as well. */
9174 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9175 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9176 {
9177 tem = op0;
9178 op0 = op1;
9179 op1 = tem;
9180 code = swap_condition (code);
9181 }
9183 if (flag_force_mem)
9184 {
9185 op0 = force_not_mem (op0);
9186 op1 = force_not_mem (op1);
9187 }
9189 do_pending_stack_adjust ();
9191 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9192 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9193 return tem;
9195 #if 0
9196 /* There's no need to do this now that combine.c can eliminate lots of
9197 sign extensions. This can be less efficient in certain cases on other
9198 machines. */
9200 /* If this is a signed equality comparison, we can do it as an
9201 unsigned comparison since zero-extension is cheaper than sign
9202 extension and comparisons with zero are done as unsigned. This is
9203 the case even on machines that can do fast sign extension, since
9204 zero-extension is easier to combine with other operations than
9205 sign-extension is. If we are comparing against a constant, we must
9206 convert it to what it would look like unsigned. */
9207 if ((code == EQ || code == NE) && ! unsignedp
9208 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9209 {
9210 if (GET_CODE (op1) == CONST_INT
9211 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9212 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9213 unsignedp = 1;
9214 }
9215 #endif
9217 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9219 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9220 }
9222 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9223 The decision as to signed or unsigned comparison must be made by the caller.
9225 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9226 compared.
9228 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9229 size of MODE should be used. */
9231 void
9232 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9233 if_false_label, if_true_label)
9234 register rtx op0, op1;
9235 enum rtx_code code;
9236 int unsignedp;
9237 enum machine_mode mode;
9238 rtx size;
9239 int align;
9240 rtx if_false_label, if_true_label;
9241 {
9242 rtx tem;
9243 int dummy_true_label = 0;
9245 /* Reverse the comparison if that is safe and we want to jump if it is
9246 false. */
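/* Reversal is avoided for floating-point modes, presumably because
   with IEEE NaNs "!(a < b)" is not equivalent to "a >= b", so
   reverse_condition would change the meaning of the test.  */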
9247 if (! if_true_label && ! FLOAT_MODE_P (mode))
9248 {
9249 if_true_label = if_false_label;
9250 if_false_label = 0;
9251 code = reverse_condition (code);
9252 }
9254 /* If one operand is constant, make it the second one. Only do this
9255 if the other operand is not constant as well. */
9257 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9258 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9259 {
9260 tem = op0;
9261 op0 = op1;
9262 op1 = tem;
9263 code = swap_condition (code);
9264 }
9266 if (flag_force_mem)
9267 {
9268 op0 = force_not_mem (op0);
9269 op1 = force_not_mem (op1);
9270 }
9272 do_pending_stack_adjust ();
9274 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9275 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9276 {
9277 if (tem == const_true_rtx)
9278 {
9279 if (if_true_label)
9280 emit_jump (if_true_label);
9281 }
9282 else
9283 {
9284 if (if_false_label)
9285 emit_jump (if_false_label);
9286 }
9287 return;
9288 }
9290 #if 0
9291 /* There's no need to do this now that combine.c can eliminate lots of
9292 sign extensions. This can be less efficient in certain cases on other
9293 machines. */
9295 /* If this is a signed equality comparison, we can do it as an
9296 unsigned comparison since zero-extension is cheaper than sign
9297 extension and comparisons with zero are done as unsigned. This is
9298 the case even on machines that can do fast sign extension, since
9299 zero-extension is easier to combine with other operations than
9300 sign-extension is. If we are comparing against a constant, we must
9301 convert it to what it would look like unsigned. */
9302 if ((code == EQ || code == NE) && ! unsignedp
9303 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9304 {
9305 if (GET_CODE (op1) == CONST_INT
9306 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9307 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9308 unsignedp = 1;
9309 }
9310 #endif
9312 if (! if_true_label)
9313 {
9314 dummy_true_label = 1;
9315 if_true_label = gen_label_rtx ();
9316 }
9318 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9319 if_true_label);
9321 if (if_false_label)
9322 emit_jump (if_false_label);
9323 if (dummy_true_label)
9324 emit_label (if_true_label);
9325 }
9327 /* Generate code for a comparison expression EXP (including code to compute
9328 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9329 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9330 generated code will drop through.
9331 SIGNED_CODE should be the rtx operation for this comparison for
9332 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9334 We force a stack adjustment unless there are currently
9335 things pushed on the stack that aren't yet used. */
9337 static void
9338 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9339 if_true_label)
9340 register tree exp;
9341 enum rtx_code signed_code, unsigned_code;
9342 rtx if_false_label, if_true_label;
9343 {
9344 register rtx op0, op1;
9345 register tree type;
9346 register enum machine_mode mode;
9347 int unsignedp;
9348 enum rtx_code code;
9350 /* Don't crash if the comparison was erroneous. */
9351 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9352 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9353 return;
9355 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9356 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9357 mode = TYPE_MODE (type);
9358 unsignedp = TREE_UNSIGNED (type);
9359 code = unsignedp ? unsigned_code : signed_code;
9361 #ifdef HAVE_canonicalize_funcptr_for_compare
9362 /* If function pointers need to be "canonicalized" before they can
9363 be reliably compared, then canonicalize them. */
9364 if (HAVE_canonicalize_funcptr_for_compare
9365 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9366 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9367 == FUNCTION_TYPE))
9368 {
9369 rtx new_op0 = gen_reg_rtx (mode);
9371 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9372 op0 = new_op0;
9373 }
9375 if (HAVE_canonicalize_funcptr_for_compare
9376 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9377 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9378 == FUNCTION_TYPE))
9379 {
9380 rtx new_op1 = gen_reg_rtx (mode);
9382 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9383 op1 = new_op1;
9384 }
9385 #endif
9387 /* Do any postincrements in the expression that was tested. */
9388 emit_queue ();
9390 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9391 ((mode == BLKmode)
9392 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9393 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT,
9394 if_false_label, if_true_label);
9395 }
9397 /* Generate code to calculate EXP using a store-flag instruction
9398 and return an rtx for the result. EXP is either a comparison
9399 or a TRUTH_NOT_EXPR whose operand is a comparison.
9401 If TARGET is nonzero, store the result there if convenient.
9403 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9404 cheap.
9406 Return zero if there is no suitable set-flag instruction
9407 available on this machine.
9409 Once expand_expr has been called on the arguments of the comparison,
9410 we are committed to doing the store flag, since it is not safe to
9411 re-evaluate the expression. We emit the store-flag insn by calling
9412 emit_store_flag, but only expand the arguments if we have a reason
9413 to believe that emit_store_flag will be successful. If we think that
9414 it will, but it isn't, we have to simulate the store-flag with a
9415 set/jump/set sequence. */
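/* In outline, the set/jump/set fallback at the end of this function
   emits roughly

       target = 1;                  (0 when INVERT is set)
       compare op0, op1
       branch-if-CODE  L
       target = 0;                  (1 when INVERT is set)
     L:

   so that TARGET ends up holding the truth value of the comparison.  */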
9417 static rtx
9418 do_store_flag (exp, target, mode, only_cheap)
9419 tree exp;
9420 rtx target;
9421 enum machine_mode mode;
9422 int only_cheap;
9423 {
9424 enum rtx_code code;
9425 tree arg0, arg1, type;
9426 tree tem;
9427 enum machine_mode operand_mode;
9428 int invert = 0;
9429 int unsignedp;
9430 rtx op0, op1;
9431 enum insn_code icode;
9432 rtx subtarget = target;
9433 rtx result, label;
9435 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9436 result at the end. We can't simply invert the test since it would
9437 have already been inverted if it were valid. This case occurs for
9438 some floating-point comparisons. */
9440 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9441 invert = 1, exp = TREE_OPERAND (exp, 0);
9443 arg0 = TREE_OPERAND (exp, 0);
9444 arg1 = TREE_OPERAND (exp, 1);
9445 type = TREE_TYPE (arg0);
9446 operand_mode = TYPE_MODE (type);
9447 unsignedp = TREE_UNSIGNED (type);
9449 /* We won't bother with BLKmode store-flag operations because it would mean
9450 passing a lot of information to emit_store_flag. */
9451 if (operand_mode == BLKmode)
9452 return 0;
9454 /* We won't bother with store-flag operations involving function pointers
9455 when function pointers must be canonicalized before comparisons. */
9456 #ifdef HAVE_canonicalize_funcptr_for_compare
9457 if (HAVE_canonicalize_funcptr_for_compare
9458 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9459 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9460 == FUNCTION_TYPE))
9461 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9462 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9463 == FUNCTION_TYPE))))
9464 return 0;
9465 #endif
9467 STRIP_NOPS (arg0);
9468 STRIP_NOPS (arg1);
9470 /* Get the rtx comparison code to use. We know that EXP is a comparison
9471 operation of some type. Some comparisons against 1 and -1 can be
9472 converted to comparisons with zero. Do so here so that the tests
9473 below will be aware that we have a comparison with zero. These
9474 tests will not catch constants in the first operand, but constants
9475 are rarely passed as the first operand. */
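/* For example, a signed "x < 1" is rewritten as "x <= 0" and a
   signed "x > -1" as "x >= 0", so the zero-comparison special cases
   further down can apply.  */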
9477 switch (TREE_CODE (exp))
9478 {
9479 case EQ_EXPR:
9480 code = EQ;
9481 break;
9482 case NE_EXPR:
9483 code = NE;
9484 break;
9485 case LT_EXPR:
9486 if (integer_onep (arg1))
9487 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9488 else
9489 code = unsignedp ? LTU : LT;
9490 break;
9491 case LE_EXPR:
9492 if (! unsignedp && integer_all_onesp (arg1))
9493 arg1 = integer_zero_node, code = LT;
9494 else
9495 code = unsignedp ? LEU : LE;
9496 break;
9497 case GT_EXPR:
9498 if (! unsignedp && integer_all_onesp (arg1))
9499 arg1 = integer_zero_node, code = GE;
9500 else
9501 code = unsignedp ? GTU : GT;
9502 break;
9503 case GE_EXPR:
9504 if (integer_onep (arg1))
9505 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9506 else
9507 code = unsignedp ? GEU : GE;
9508 break;
9509 default:
9510 abort ();
9511 }
9513 /* Put a constant second. */
9514 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9515 {
9516 tem = arg0; arg0 = arg1; arg1 = tem;
9517 code = swap_condition (code);
9518 }
9520 /* If this is an equality or inequality test of a single bit, we can
9521 do this by shifting the bit being tested to the low-order bit and
9522 masking the result with the constant 1. If the condition was EQ,
9523 we xor it with 1. This does not require an scc insn and is faster
9524 than an scc insn even if we have it. */
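/* For instance, "(x & 8) != 0" is computed as "(x >> 3) & 1", and
   "(x & 8) == 0" additionally XORs that result with 1.  The final
   AND is omitted when the tested bit is the sign bit, since the
   unsigned shift already leaves only that bit.  */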
9526 if ((code == NE || code == EQ)
9527 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9528 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9529 {
9530 tree inner = TREE_OPERAND (arg0, 0);
9531 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9532 int ops_unsignedp;
9534 /* If INNER is a right shift of a constant and it plus BITNUM does
9535 not overflow, adjust BITNUM and INNER. */
9537 if (TREE_CODE (inner) == RSHIFT_EXPR
9538 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9539 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9540 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9541 < TYPE_PRECISION (type)))
9542 {
9543 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9544 inner = TREE_OPERAND (inner, 0);
9545 }
9547 /* If we are going to be able to omit the AND below, we must do our
9548 operations as unsigned. If we must use the AND, we have a choice.
9549 Normally unsigned is faster, but for some machines signed is. */
9550 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9551 #ifdef LOAD_EXTEND_OP
9552 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9553 #else
9554 : 1
9555 #endif
9556 );
9558 if (subtarget == 0 || GET_CODE (subtarget) != REG
9559 || GET_MODE (subtarget) != operand_mode
9560 || ! safe_from_p (subtarget, inner, 1))
9561 subtarget = 0;
9563 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9565 if (bitnum != 0)
9566 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9567 size_int (bitnum), subtarget, ops_unsignedp);
9569 if (GET_MODE (op0) != mode)
9570 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9572 if ((code == EQ && ! invert) || (code == NE && invert))
9573 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9574 ops_unsignedp, OPTAB_LIB_WIDEN);
9576 /* Put the AND last so it can combine with more things. */
9577 if (bitnum != TYPE_PRECISION (type) - 1)
9578 op0 = expand_and (op0, const1_rtx, subtarget);
9580 return op0;
9581 }
9583 /* Now see if we are likely to be able to do this. Return if not. */
9584 if (! can_compare_p (operand_mode))
9585 return 0;
9586 icode = setcc_gen_code[(int) code];
9587 if (icode == CODE_FOR_nothing
9588 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9589 {
9590 /* We can only do this if it is one of the special cases that
9591 can be handled without an scc insn. */
9592 if ((code == LT && integer_zerop (arg1))
9593 || (! only_cheap && code == GE && integer_zerop (arg1)))
9594 ;
9595 else if (BRANCH_COST >= 0
9596 && ! only_cheap && (code == NE || code == EQ)
9597 && TREE_CODE (type) != REAL_TYPE
9598 && ((abs_optab->handlers[(int) operand_mode].insn_code
9599 != CODE_FOR_nothing)
9600 || (ffs_optab->handlers[(int) operand_mode].insn_code
9601 != CODE_FOR_nothing)))
9602 ;
9603 else
9604 return 0;
9605 }
9607 preexpand_calls (exp);
9608 if (subtarget == 0 || GET_CODE (subtarget) != REG
9609 || GET_MODE (subtarget) != operand_mode
9610 || ! safe_from_p (subtarget, arg1, 1))
9611 subtarget = 0;
9613 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9614 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9616 if (target == 0)
9617 target = gen_reg_rtx (mode);
9619 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9620 because, if the emit_store_flag does anything it will succeed and
9621 OP0 and OP1 will not be used subsequently. */
9623 result = emit_store_flag (target, code,
9624 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9625 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9626 operand_mode, unsignedp, 1);
9628 if (result)
9629 {
9630 if (invert)
9631 result = expand_binop (mode, xor_optab, result, const1_rtx,
9632 result, 0, OPTAB_LIB_WIDEN);
9633 return result;
9634 }
9636 /* If this failed, we have to do this with set/compare/jump/set code. */
9637 if (GET_CODE (target) != REG
9638 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9639 target = gen_reg_rtx (GET_MODE (target));
9641 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9642 result = compare_from_rtx (op0, op1, code, unsignedp,
9643 operand_mode, NULL_RTX, 0);
9644 if (GET_CODE (result) == CONST_INT)
9645 return (((result == const0_rtx && ! invert)
9646 || (result != const0_rtx && invert))
9647 ? const0_rtx : const1_rtx);
9649 label = gen_label_rtx ();
9650 if (bcc_gen_fctn[(int) code] == 0)
9651 abort ();
9653 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9654 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9655 emit_label (label);
9657 return target;
9658 }
9660 /* Generate a tablejump instruction (used for switch statements). */
9662 #ifdef HAVE_tablejump
9664 /* INDEX is the value being switched on, with the lowest value
9665 in the table already subtracted.
9666 MODE is its expected mode (needed if INDEX is constant).
9667 RANGE is the length of the jump table.
9668 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9670 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9671 index value is out of range. */
9673 void
9674 do_tablejump (index, mode, range, table_label, default_label)
9675 rtx index, range, table_label, default_label;
9676 enum machine_mode mode;
9677 {
9678 register rtx temp, vector;
9680 /* Do an unsigned comparison (in the proper mode) between the index
9681 expression and the value which represents the length of the range.
9682 Since we just finished subtracting the lower bound of the range
9683 from the index expression, this comparison allows us to simultaneously
9684 check that the original index expression value is both greater than
9685 or equal to the minimum value of the range and less than or equal to
9686 the maximum value of the range. */
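/* If the original index was below the lower bound, the earlier
   subtraction wrapped around (the comparison is unsigned), giving a
   value larger than RANGE, so the single GTU test catches both
   out-of-range directions.  */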
9688 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9689 0, default_label);
9691 /* If index is in range, it must fit in Pmode.
9692 Convert to Pmode so we can index with it. */
9693 if (mode != Pmode)
9694 index = convert_to_mode (Pmode, index, 1);
9696 /* Don't let a MEM slip thru, because then INDEX that comes
9697 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9698 and break_out_memory_refs will go to work on it and mess it up. */
9699 #ifdef PIC_CASE_VECTOR_ADDRESS
9700 if (flag_pic && GET_CODE (index) != REG)
9701 index = copy_to_mode_reg (Pmode, index);
9702 #endif
9704 /* If flag_force_addr were to affect this address
9705 it could interfere with the tricky assumptions made
9706 about addresses that contain label-refs,
9707 which may be valid only very near the tablejump itself. */
9708 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9709 GET_MODE_SIZE, because this indicates how large insns are. The other
9710 uses should all be Pmode, because they are addresses. This code
9711 could fail if addresses and insns are not the same size. */
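/* The address computed below is essentially

       table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

   i.e. INDEX scaled by the size of one dispatch-table entry plus the
   address of the table itself (further massaged for PIC below).  */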
9712 index = gen_rtx_PLUS (Pmode,
9713 gen_rtx_MULT (Pmode, index,
9714 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9715 gen_rtx_LABEL_REF (Pmode, table_label));
9716 #ifdef PIC_CASE_VECTOR_ADDRESS
9717 if (flag_pic)
9718 index = PIC_CASE_VECTOR_ADDRESS (index);
9719 else
9720 #endif
9721 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9722 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9723 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9724 RTX_UNCHANGING_P (vector) = 1;
9725 convert_move (temp, vector, 0);
9727 emit_jump_insn (gen_tablejump (temp, table_label));
9729 /* If we are generating PIC code or if the table is PC-relative, the
9730 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9731 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9732 emit_barrier ();
9733 }
9735 #endif /* HAVE_tablejump */