* stmt.c (expand_start_bindings): Allow callers to pass a flag
[official-gcc.git] / gcc / expr.c
blob880e347a68e46d31dd6fdb53be1cdb280b51bfe6
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "typeclass.h"
41 #include "defaults.h"
42 #include "toplev.h"
43 #include "ggc.h"
45 #define CEIL(x,y) (((x) + (y) - 1) / (y))
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
53 #ifdef PUSH_ROUNDING
55 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
56 #define PUSH_ARGS_REVERSED /* If it's last to first */
57 #endif
59 #endif
61 #ifndef STACK_PUSH_CODE
62 #ifdef STACK_GROWS_DOWNWARD
63 #define STACK_PUSH_CODE PRE_DEC
64 #else
65 #define STACK_PUSH_CODE PRE_INC
66 #endif
67 #endif
69 /* Assume that case vectors are not pc-relative. */
70 #ifndef CASE_VECTOR_PC_RELATIVE
71 #define CASE_VECTOR_PC_RELATIVE 0
72 #endif
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
80 int cse_not_expected;
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
87 /* Don't check memory usage, since code is being emitted to check a memory
88 usage. Used when current_function_check_memory_usage is true, to avoid
89 infinite recursion. */
90 static int in_check_memory_usage;
92 /* This structure is used by move_by_pieces to describe the move to
93 be performed. */
94 struct move_by_pieces
96 rtx to;
97 rtx to_addr;
98 int autinc_to;
99 int explicit_inc_to;
100 int to_struct;
101 int to_readonly;
102 rtx from;
103 rtx from_addr;
104 int autinc_from;
105 int explicit_inc_from;
106 int from_struct;
107 int from_readonly;
108 int len;
109 int offset;
110 int reverse;
113 /* This structure is used by clear_by_pieces to describe the clear to
114 be performed. */
116 struct clear_by_pieces
118 rtx to;
119 rtx to_addr;
120 int autinc_to;
121 int explicit_inc_to;
122 int to_struct;
123 int len;
124 int offset;
125 int reverse;
128 extern struct obstack permanent_obstack;
130 static rtx get_push_address PROTO ((int));
132 static rtx enqueue_insn PROTO((rtx, rtx));
133 static int move_by_pieces_ninsns PROTO((unsigned int, int));
134 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
135 struct move_by_pieces *));
136 static void clear_by_pieces PROTO((rtx, int, int));
137 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...),
138 enum machine_mode,
139 struct clear_by_pieces *));
140 static int is_zeros_p PROTO((tree));
141 static int mostly_zeros_p PROTO((tree));
142 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
143 tree, tree, int, int));
144 static void store_constructor PROTO((tree, rtx, int, int));
145 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
146 enum machine_mode, int, int,
147 int, int));
148 static enum memory_use_mode
149 get_memory_usage_from_modifier PROTO((enum expand_modifier));
150 static tree save_noncopied_parts PROTO((tree, tree));
151 static tree init_noncopied_parts PROTO((tree, tree));
152 static int safe_from_p PROTO((rtx, tree, int));
153 static int fixed_type_p PROTO((tree));
154 static rtx var_rtx PROTO((tree));
155 static rtx expand_increment PROTO((tree, int, int));
156 static void preexpand_calls PROTO((tree));
157 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
158 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
159 static void do_compare_and_jump PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
160 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
162 /* Record for each mode whether we can move a register directly to or
163 from an object of that mode in memory. If we can't, we won't try
164 to use that mode directly when accessing a field of that mode. */
166 static char direct_load[NUM_MACHINE_MODES];
167 static char direct_store[NUM_MACHINE_MODES];
169 /* If a memory-to-memory move would take MOVE_RATIO or more simple
170 move-instruction sequences, we will do a movstr or libcall instead. */
172 #ifndef MOVE_RATIO
173 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
174 #define MOVE_RATIO 2
175 #else
176 /* If we are optimizing for space (-Os), cut down the default move ratio */
177 #define MOVE_RATIO (optimize_size ? 3 : 15)
178 #endif
179 #endif
181 /* This macro is used to determine whether move_by_pieces should be called
182 to perform a structure copy. */
183 #ifndef MOVE_BY_PIECES_P
184 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
185 (SIZE, ALIGN) < MOVE_RATIO)
186 #endif
188 /* This array records the insn_code of insns to perform block moves. */
189 enum insn_code movstr_optab[NUM_MACHINE_MODES];
191 /* This array records the insn_code of insns to perform block clears. */
192 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
194 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
196 #ifndef SLOW_UNALIGNED_ACCESS
197 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
198 #endif
200 /* This is run once per compilation to set up which modes can be used
201 directly in memory and to initialize the block move optab. */
203 void
204 init_expr_once ()
206 rtx insn, pat;
207 enum machine_mode mode;
208 int num_clobbers;
209 rtx mem, mem1;
210 char *free_point;
212 start_sequence ();
214 /* Since we are on the permanent obstack, we must be sure we save this
215 spot AFTER we call start_sequence, since it will reuse the rtl it
216 makes. */
217 free_point = (char *) oballoc (0);
219 /* Try indexing by frame ptr and try by stack ptr.
220 It is known that on the Convex the stack ptr isn't a valid index.
221 With luck, one or the other is valid on any machine. */
222 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
223 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
225 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
226 pat = PATTERN (insn);
228 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
229 mode = (enum machine_mode) ((int) mode + 1))
231 int regno;
232 rtx reg;
234 direct_load[(int) mode] = direct_store[(int) mode] = 0;
235 PUT_MODE (mem, mode);
236 PUT_MODE (mem1, mode);
238 /* See if there is some register that can be used in this mode and
239 directly loaded or stored from memory. */
241 if (mode != VOIDmode && mode != BLKmode)
242 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
243 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
244 regno++)
246 if (! HARD_REGNO_MODE_OK (regno, mode))
247 continue;
249 reg = gen_rtx_REG (mode, regno);
251 SET_SRC (pat) = mem;
252 SET_DEST (pat) = reg;
253 if (recog (pat, insn, &num_clobbers) >= 0)
254 direct_load[(int) mode] = 1;
256 SET_SRC (pat) = mem1;
257 SET_DEST (pat) = reg;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_load[(int) mode] = 1;
261 SET_SRC (pat) = reg;
262 SET_DEST (pat) = mem;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_store[(int) mode] = 1;
266 SET_SRC (pat) = reg;
267 SET_DEST (pat) = mem1;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_store[(int) mode] = 1;
273 end_sequence ();
274 obfree (free_point);
277 /* This is run at the start of compiling a function. */
279 void
280 init_expr ()
282 current_function->expr
283 = (struct expr_status *) xmalloc (sizeof (struct expr_status));
285 pending_chain = 0;
286 pending_stack_adjust = 0;
287 inhibit_defer_pop = 0;
288 saveregs_value = 0;
289 apply_args_value = 0;
290 forced_labels = 0;
293 void
294 mark_expr_status (p)
295 struct expr_status *p;
297 if (p == NULL)
298 return;
300 ggc_mark_rtx (p->x_saveregs_value);
301 ggc_mark_rtx (p->x_apply_args_value);
302 ggc_mark_rtx (p->x_forced_labels);
305 void
306 free_expr_status (f)
307 struct function *f;
309 free (f->expr);
310 f->expr = NULL;
313 /* Small sanity check that the queue is empty at the end of a function. */
314 void
315 finish_expr_for_function ()
317 if (pending_chain)
318 abort ();
321 /* Manage the queue of increment instructions to be output
322 for POSTINCREMENT_EXPR expressions, etc. */
324 /* Queue up to increment (or change) VAR later. BODY says how:
325 BODY should be the same thing you would pass to emit_insn
326 to increment right away. It will go to emit_insn later on.
328 The value is a QUEUED expression to be used in place of VAR
329 where you want to guarantee the pre-incrementation value of VAR. */
331 static rtx
332 enqueue_insn (var, body)
333 rtx var, body;
335 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
336 body, pending_chain);
337 return pending_chain;
340 /* Use protect_from_queue to convert a QUEUED expression
341 into something that you can put immediately into an instruction.
342 If the queued incrementation has not happened yet,
343 protect_from_queue returns the variable itself.
344 If the incrementation has happened, protect_from_queue returns a temp
345 that contains a copy of the old value of the variable.
347 Any time an rtx which might possibly be a QUEUED is to be put
348 into an instruction, it must be passed through protect_from_queue first.
349 QUEUED expressions are not meaningful in instructions.
351 Do not pass a value through protect_from_queue and then hold
352 on to it for a while before putting it in an instruction!
353 If the queue is flushed in between, incorrect code will result. */
356 protect_from_queue (x, modify)
357 register rtx x;
358 int modify;
360 register RTX_CODE code = GET_CODE (x);
362 #if 0 /* A QUEUED can hang around after the queue is forced out. */
363 /* Shortcut for most common case. */
364 if (pending_chain == 0)
365 return x;
366 #endif
368 if (code != QUEUED)
370 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
371 use of autoincrement. Make a copy of the contents of the memory
372 location rather than a copy of the address, but not if the value is
373 of mode BLKmode. Don't modify X in place since it might be
374 shared. */
375 if (code == MEM && GET_MODE (x) != BLKmode
376 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
378 register rtx y = XEXP (x, 0);
379 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
381 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
382 MEM_COPY_ATTRIBUTES (new, x);
383 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
385 if (QUEUED_INSN (y))
387 register rtx temp = gen_reg_rtx (GET_MODE (new));
388 emit_insn_before (gen_move_insn (temp, new),
389 QUEUED_INSN (y));
390 return temp;
392 return new;
394 /* Otherwise, recursively protect the subexpressions of all
395 the kinds of rtx's that can contain a QUEUED. */
396 if (code == MEM)
398 rtx tem = protect_from_queue (XEXP (x, 0), 0);
399 if (tem != XEXP (x, 0))
401 x = copy_rtx (x);
402 XEXP (x, 0) = tem;
405 else if (code == PLUS || code == MULT)
407 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
408 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
409 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
411 x = copy_rtx (x);
412 XEXP (x, 0) = new0;
413 XEXP (x, 1) = new1;
416 return x;
418 /* If the increment has not happened, use the variable itself. */
419 if (QUEUED_INSN (x) == 0)
420 return QUEUED_VAR (x);
421 /* If the increment has happened and a pre-increment copy exists,
422 use that copy. */
423 if (QUEUED_COPY (x) != 0)
424 return QUEUED_COPY (x);
425 /* The increment has happened but we haven't set up a pre-increment copy.
426 Set one up now, and use it. */
427 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
428 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
429 QUEUED_INSN (x));
430 return QUEUED_COPY (x);
433 /* Return nonzero if X contains a QUEUED expression:
434 if it contains anything that will be altered by a queued increment.
435 We handle only combinations of MEM, PLUS, MINUS and MULT operators
436 since memory addresses generally contain only those. */
439 queued_subexp_p (x)
440 rtx x;
442 register enum rtx_code code = GET_CODE (x);
443 switch (code)
445 case QUEUED:
446 return 1;
447 case MEM:
448 return queued_subexp_p (XEXP (x, 0));
449 case MULT:
450 case PLUS:
451 case MINUS:
452 return (queued_subexp_p (XEXP (x, 0))
453 || queued_subexp_p (XEXP (x, 1)));
454 default:
455 return 0;
459 /* Perform all the pending incrementations. */
461 void
462 emit_queue ()
464 register rtx p;
465 while ((p = pending_chain))
467 rtx body = QUEUED_BODY (p);
469 if (GET_CODE (body) == SEQUENCE)
471 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
472 emit_insn (QUEUED_BODY (p));
474 else
475 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
476 pending_chain = QUEUED_NEXT (p);
480 /* Copy data from FROM to TO, where the machine modes are not the same.
481 Both modes may be integer, or both may be floating.
482 UNSIGNEDP should be nonzero if FROM is an unsigned type.
483 This causes zero-extension instead of sign-extension. */
485 void
486 convert_move (to, from, unsignedp)
487 register rtx to, from;
488 int unsignedp;
490 enum machine_mode to_mode = GET_MODE (to);
491 enum machine_mode from_mode = GET_MODE (from);
492 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
493 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
494 enum insn_code code;
495 rtx libcall;
497 /* rtx code for making an equivalent value. */
498 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
500 to = protect_from_queue (to, 1);
501 from = protect_from_queue (from, 0);
503 if (to_real != from_real)
504 abort ();
506 /* If FROM is a SUBREG that indicates that we have already done at least
507 the required extension, strip it. We don't handle such SUBREGs as
508 TO here. */
510 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
511 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
512 >= GET_MODE_SIZE (to_mode))
513 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
514 from = gen_lowpart (to_mode, from), from_mode = to_mode;
516 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
517 abort ();
519 if (to_mode == from_mode
520 || (from_mode == VOIDmode && CONSTANT_P (from)))
522 emit_move_insn (to, from);
523 return;
526 if (to_real)
528 rtx value;
530 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
532 /* Try converting directly if the insn is supported. */
533 if ((code = can_extend_p (to_mode, from_mode, 0))
534 != CODE_FOR_nothing)
536 emit_unop_insn (code, to, from, UNKNOWN);
537 return;
541 #ifdef HAVE_trunchfqf2
542 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
544 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
545 return;
547 #endif
548 #ifdef HAVE_trunctqfqf2
549 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
551 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
552 return;
554 #endif
555 #ifdef HAVE_truncsfqf2
556 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
558 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
559 return;
561 #endif
562 #ifdef HAVE_truncdfqf2
563 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
565 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
566 return;
568 #endif
569 #ifdef HAVE_truncxfqf2
570 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
572 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
573 return;
575 #endif
576 #ifdef HAVE_trunctfqf2
577 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
579 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
580 return;
582 #endif
584 #ifdef HAVE_trunctqfhf2
585 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
587 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
588 return;
590 #endif
591 #ifdef HAVE_truncsfhf2
592 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
594 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
595 return;
597 #endif
598 #ifdef HAVE_truncdfhf2
599 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
601 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
602 return;
604 #endif
605 #ifdef HAVE_truncxfhf2
606 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
608 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
609 return;
611 #endif
612 #ifdef HAVE_trunctfhf2
613 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
615 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
616 return;
618 #endif
620 #ifdef HAVE_truncsftqf2
621 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
623 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
624 return;
626 #endif
627 #ifdef HAVE_truncdftqf2
628 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
630 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
631 return;
633 #endif
634 #ifdef HAVE_truncxftqf2
635 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
637 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
638 return;
640 #endif
641 #ifdef HAVE_trunctftqf2
642 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
644 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
645 return;
647 #endif
649 #ifdef HAVE_truncdfsf2
650 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
652 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
653 return;
655 #endif
656 #ifdef HAVE_truncxfsf2
657 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
659 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
660 return;
662 #endif
663 #ifdef HAVE_trunctfsf2
664 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
666 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
667 return;
669 #endif
670 #ifdef HAVE_truncxfdf2
671 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
673 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
674 return;
676 #endif
677 #ifdef HAVE_trunctfdf2
678 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
680 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
681 return;
683 #endif
685 libcall = (rtx) 0;
686 switch (from_mode)
688 case SFmode:
689 switch (to_mode)
691 case DFmode:
692 libcall = extendsfdf2_libfunc;
693 break;
695 case XFmode:
696 libcall = extendsfxf2_libfunc;
697 break;
699 case TFmode:
700 libcall = extendsftf2_libfunc;
701 break;
703 default:
704 break;
706 break;
708 case DFmode:
709 switch (to_mode)
711 case SFmode:
712 libcall = truncdfsf2_libfunc;
713 break;
715 case XFmode:
716 libcall = extenddfxf2_libfunc;
717 break;
719 case TFmode:
720 libcall = extenddftf2_libfunc;
721 break;
723 default:
724 break;
726 break;
728 case XFmode:
729 switch (to_mode)
731 case SFmode:
732 libcall = truncxfsf2_libfunc;
733 break;
735 case DFmode:
736 libcall = truncxfdf2_libfunc;
737 break;
739 default:
740 break;
742 break;
744 case TFmode:
745 switch (to_mode)
747 case SFmode:
748 libcall = trunctfsf2_libfunc;
749 break;
751 case DFmode:
752 libcall = trunctfdf2_libfunc;
753 break;
755 default:
756 break;
758 break;
760 default:
761 break;
764 if (libcall == (rtx) 0)
765 /* This conversion is not implemented yet. */
766 abort ();
768 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
769 1, from, from_mode);
770 emit_move_insn (to, value);
771 return;
774 /* Now both modes are integers. */
776 /* Handle expanding beyond a word. */
777 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
778 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
780 rtx insns;
781 rtx lowpart;
782 rtx fill_value;
783 rtx lowfrom;
784 int i;
785 enum machine_mode lowpart_mode;
786 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
788 /* Try converting directly if the insn is supported. */
789 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
790 != CODE_FOR_nothing)
792 /* If FROM is a SUBREG, put it into a register. Do this
793 so that we always generate the same set of insns for
794 better cse'ing; if an intermediate assignment occurred,
795 we won't be doing the operation directly on the SUBREG. */
796 if (optimize > 0 && GET_CODE (from) == SUBREG)
797 from = force_reg (from_mode, from);
798 emit_unop_insn (code, to, from, equiv_code);
799 return;
801 /* Next, try converting via full word. */
802 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
803 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
804 != CODE_FOR_nothing))
806 if (GET_CODE (to) == REG)
807 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
808 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
809 emit_unop_insn (code, to,
810 gen_lowpart (word_mode, to), equiv_code);
811 return;
814 /* No special multiword conversion insn; do it by hand. */
815 start_sequence ();
817 /* Since we will turn this into a no conflict block, we must ensure
818 that the source does not overlap the target. */
820 if (reg_overlap_mentioned_p (to, from))
821 from = force_reg (from_mode, from);
823 /* Get a copy of FROM widened to a word, if necessary. */
824 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
825 lowpart_mode = word_mode;
826 else
827 lowpart_mode = from_mode;
829 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
831 lowpart = gen_lowpart (lowpart_mode, to);
832 emit_move_insn (lowpart, lowfrom);
834 /* Compute the value to put in each remaining word. */
835 if (unsignedp)
836 fill_value = const0_rtx;
837 else
839 #ifdef HAVE_slt
840 if (HAVE_slt
841 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
842 && STORE_FLAG_VALUE == -1)
844 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
845 lowpart_mode, 0, 0);
846 fill_value = gen_reg_rtx (word_mode);
847 emit_insn (gen_slt (fill_value));
849 else
850 #endif
852 fill_value
853 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
854 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
855 NULL_RTX, 0);
856 fill_value = convert_to_mode (word_mode, fill_value, 1);
860 /* Fill the remaining words. */
861 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
863 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
864 rtx subword = operand_subword (to, index, 1, to_mode);
866 if (subword == 0)
867 abort ();
869 if (fill_value != subword)
870 emit_move_insn (subword, fill_value);
873 insns = get_insns ();
874 end_sequence ();
876 emit_no_conflict_block (insns, to, from, NULL_RTX,
877 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
878 return;
881 /* Truncating multi-word to a word or less. */
882 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
883 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
885 if (!((GET_CODE (from) == MEM
886 && ! MEM_VOLATILE_P (from)
887 && direct_load[(int) to_mode]
888 && ! mode_dependent_address_p (XEXP (from, 0)))
889 || GET_CODE (from) == REG
890 || GET_CODE (from) == SUBREG))
891 from = force_reg (from_mode, from);
892 convert_move (to, gen_lowpart (word_mode, from), 0);
893 return;
896 /* Handle pointer conversion */ /* SPEE 900220 */
897 if (to_mode == PQImode)
899 if (from_mode != QImode)
900 from = convert_to_mode (QImode, from, unsignedp);
902 #ifdef HAVE_truncqipqi2
903 if (HAVE_truncqipqi2)
905 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
906 return;
908 #endif /* HAVE_truncqipqi2 */
909 abort ();
912 if (from_mode == PQImode)
914 if (to_mode != QImode)
916 from = convert_to_mode (QImode, from, unsignedp);
917 from_mode = QImode;
919 else
921 #ifdef HAVE_extendpqiqi2
922 if (HAVE_extendpqiqi2)
924 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
925 return;
927 #endif /* HAVE_extendpqiqi2 */
928 abort ();
932 if (to_mode == PSImode)
934 if (from_mode != SImode)
935 from = convert_to_mode (SImode, from, unsignedp);
937 #ifdef HAVE_truncsipsi2
938 if (HAVE_truncsipsi2)
940 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
941 return;
943 #endif /* HAVE_truncsipsi2 */
944 abort ();
947 if (from_mode == PSImode)
949 if (to_mode != SImode)
951 from = convert_to_mode (SImode, from, unsignedp);
952 from_mode = SImode;
954 else
956 #ifdef HAVE_extendpsisi2
957 if (HAVE_extendpsisi2)
959 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
960 return;
962 #endif /* HAVE_extendpsisi2 */
963 abort ();
967 if (to_mode == PDImode)
969 if (from_mode != DImode)
970 from = convert_to_mode (DImode, from, unsignedp);
972 #ifdef HAVE_truncdipdi2
973 if (HAVE_truncdipdi2)
975 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
976 return;
978 #endif /* HAVE_truncdipdi2 */
979 abort ();
982 if (from_mode == PDImode)
984 if (to_mode != DImode)
986 from = convert_to_mode (DImode, from, unsignedp);
987 from_mode = DImode;
989 else
991 #ifdef HAVE_extendpdidi2
992 if (HAVE_extendpdidi2)
994 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
995 return;
997 #endif /* HAVE_extendpdidi2 */
998 abort ();
1002 /* Now follow all the conversions between integers
1003 no more than a word long. */
1005 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1006 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1007 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1008 GET_MODE_BITSIZE (from_mode)))
1010 if (!((GET_CODE (from) == MEM
1011 && ! MEM_VOLATILE_P (from)
1012 && direct_load[(int) to_mode]
1013 && ! mode_dependent_address_p (XEXP (from, 0)))
1014 || GET_CODE (from) == REG
1015 || GET_CODE (from) == SUBREG))
1016 from = force_reg (from_mode, from);
1017 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1018 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1019 from = copy_to_reg (from);
1020 emit_move_insn (to, gen_lowpart (to_mode, from));
1021 return;
1024 /* Handle extension. */
1025 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1027 /* Convert directly if that works. */
1028 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1029 != CODE_FOR_nothing)
1031 emit_unop_insn (code, to, from, equiv_code);
1032 return;
1034 else
1036 enum machine_mode intermediate;
1037 rtx tmp;
1038 tree shift_amount;
1040 /* Search for a mode to convert via. */
1041 for (intermediate = from_mode; intermediate != VOIDmode;
1042 intermediate = GET_MODE_WIDER_MODE (intermediate))
1043 if (((can_extend_p (to_mode, intermediate, unsignedp)
1044 != CODE_FOR_nothing)
1045 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1046 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1047 GET_MODE_BITSIZE (intermediate))))
1048 && (can_extend_p (intermediate, from_mode, unsignedp)
1049 != CODE_FOR_nothing))
1051 convert_move (to, convert_to_mode (intermediate, from,
1052 unsignedp), unsignedp);
1053 return;
1056 /* No suitable intermediate mode.
1057 Generate what we need with shifts. */
1058 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1059 - GET_MODE_BITSIZE (from_mode), 0);
1060 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1061 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1062 to, unsignedp);
1063 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1064 to, unsignedp);
1065 if (tmp != to)
1066 emit_move_insn (to, tmp);
1067 return;
1071 /* Support special truncate insns for certain modes. */
1073 if (from_mode == DImode && to_mode == SImode)
1075 #ifdef HAVE_truncdisi2
1076 if (HAVE_truncdisi2)
1078 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1079 return;
1081 #endif
1082 convert_move (to, force_reg (from_mode, from), unsignedp);
1083 return;
1086 if (from_mode == DImode && to_mode == HImode)
1088 #ifdef HAVE_truncdihi2
1089 if (HAVE_truncdihi2)
1091 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1092 return;
1094 #endif
1095 convert_move (to, force_reg (from_mode, from), unsignedp);
1096 return;
1099 if (from_mode == DImode && to_mode == QImode)
1101 #ifdef HAVE_truncdiqi2
1102 if (HAVE_truncdiqi2)
1104 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1105 return;
1107 #endif
1108 convert_move (to, force_reg (from_mode, from), unsignedp);
1109 return;
1112 if (from_mode == SImode && to_mode == HImode)
1114 #ifdef HAVE_truncsihi2
1115 if (HAVE_truncsihi2)
1117 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1118 return;
1120 #endif
1121 convert_move (to, force_reg (from_mode, from), unsignedp);
1122 return;
1125 if (from_mode == SImode && to_mode == QImode)
1127 #ifdef HAVE_truncsiqi2
1128 if (HAVE_truncsiqi2)
1130 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1131 return;
1133 #endif
1134 convert_move (to, force_reg (from_mode, from), unsignedp);
1135 return;
1138 if (from_mode == HImode && to_mode == QImode)
1140 #ifdef HAVE_trunchiqi2
1141 if (HAVE_trunchiqi2)
1143 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1144 return;
1146 #endif
1147 convert_move (to, force_reg (from_mode, from), unsignedp);
1148 return;
1151 if (from_mode == TImode && to_mode == DImode)
1153 #ifdef HAVE_trunctidi2
1154 if (HAVE_trunctidi2)
1156 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1157 return;
1159 #endif
1160 convert_move (to, force_reg (from_mode, from), unsignedp);
1161 return;
1164 if (from_mode == TImode && to_mode == SImode)
1166 #ifdef HAVE_trunctisi2
1167 if (HAVE_trunctisi2)
1169 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1170 return;
1172 #endif
1173 convert_move (to, force_reg (from_mode, from), unsignedp);
1174 return;
1177 if (from_mode == TImode && to_mode == HImode)
1179 #ifdef HAVE_trunctihi2
1180 if (HAVE_trunctihi2)
1182 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1183 return;
1185 #endif
1186 convert_move (to, force_reg (from_mode, from), unsignedp);
1187 return;
1190 if (from_mode == TImode && to_mode == QImode)
1192 #ifdef HAVE_trunctiqi2
1193 if (HAVE_trunctiqi2)
1195 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1196 return;
1198 #endif
1199 convert_move (to, force_reg (from_mode, from), unsignedp);
1200 return;
1203 /* Handle truncation of volatile memrefs, and so on;
1204 the things that couldn't be truncated directly,
1205 and for which there was no special instruction. */
1206 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1208 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1209 emit_move_insn (to, temp);
1210 return;
1213 /* Mode combination is not recognized. */
1214 abort ();
1217 /* Return an rtx for a value that would result
1218 from converting X to mode MODE.
1219 Both X and MODE may be floating, or both integer.
1220 UNSIGNEDP is nonzero if X is an unsigned value.
1221 This can be done by referring to a part of X in place
1222 or by copying to a new temporary with conversion.
1224 This function *must not* call protect_from_queue
1225 except when putting X into an insn (in which case convert_move does it). */
1228 convert_to_mode (mode, x, unsignedp)
1229 enum machine_mode mode;
1230 rtx x;
1231 int unsignedp;
1233 return convert_modes (mode, VOIDmode, x, unsignedp);
1236 /* Return an rtx for a value that would result
1237 from converting X from mode OLDMODE to mode MODE.
1238 Both modes may be floating, or both integer.
1239 UNSIGNEDP is nonzero if X is an unsigned value.
1241 This can be done by referring to a part of X in place
1242 or by copying to a new temporary with conversion.
1244 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1246 This function *must not* call protect_from_queue
1247 except when putting X into an insn (in which case convert_move does it). */
1250 convert_modes (mode, oldmode, x, unsignedp)
1251 enum machine_mode mode, oldmode;
1252 rtx x;
1253 int unsignedp;
1255 register rtx temp;
1257 /* If FROM is a SUBREG that indicates that we have already done at least
1258 the required extension, strip it. */
1260 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1261 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1262 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1263 x = gen_lowpart (mode, x);
1265 if (GET_MODE (x) != VOIDmode)
1266 oldmode = GET_MODE (x);
1268 if (mode == oldmode)
1269 return x;
1271 /* There is one case that we must handle specially: If we are converting
1272 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1273 we are to interpret the constant as unsigned, gen_lowpart will do
1274 the wrong if the constant appears negative. What we want to do is
1275 make the high-order word of the constant zero, not all ones. */
1277 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1278 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1279 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1281 HOST_WIDE_INT val = INTVAL (x);
1283 if (oldmode != VOIDmode
1284 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1286 int width = GET_MODE_BITSIZE (oldmode);
1288 /* We need to zero extend VAL. */
1289 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1292 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1295 /* We can do this with a gen_lowpart if both desired and current modes
1296 are integer, and this is either a constant integer, a register, or a
1297 non-volatile MEM. Except for the constant case where MODE is no
1298 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1300 if ((GET_CODE (x) == CONST_INT
1301 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1302 || (GET_MODE_CLASS (mode) == MODE_INT
1303 && GET_MODE_CLASS (oldmode) == MODE_INT
1304 && (GET_CODE (x) == CONST_DOUBLE
1305 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1306 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1307 && direct_load[(int) mode])
1308 || (GET_CODE (x) == REG
1309 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1310 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1312 /* ?? If we don't know OLDMODE, we have to assume here that
1313 X does not need sign- or zero-extension. This may not be
1314 the case, but it's the best we can do. */
1315 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1316 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1318 HOST_WIDE_INT val = INTVAL (x);
1319 int width = GET_MODE_BITSIZE (oldmode);
1321 /* We must sign or zero-extend in this case. Start by
1322 zero-extending, then sign extend if we need to. */
1323 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1324 if (! unsignedp
1325 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1326 val |= (HOST_WIDE_INT) (-1) << width;
1328 return GEN_INT (val);
1331 return gen_lowpart (mode, x);
1334 temp = gen_reg_rtx (mode);
1335 convert_move (temp, x, unsignedp);
1336 return temp;
1340 /* This macro is used to determine what the largest unit size that
1341 move_by_pieces can use is. */
1343 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1344 move efficiently, as opposed to MOVE_MAX which is the maximum
1345 number of bhytes we can move with a single instruction. */
1347 #ifndef MOVE_MAX_PIECES
1348 #define MOVE_MAX_PIECES MOVE_MAX
1349 #endif
1351 /* Generate several move instructions to copy LEN bytes
1352 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1353 The caller must pass FROM and TO
1354 through protect_from_queue before calling.
1355 ALIGN (in bytes) is maximum alignment we can assume. */
1357 void
1358 move_by_pieces (to, from, len, align)
1359 rtx to, from;
1360 int len, align;
1362 struct move_by_pieces data;
1363 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1364 int max_size = MOVE_MAX_PIECES + 1;
1365 enum machine_mode mode = VOIDmode, tmode;
1366 enum insn_code icode;
1368 data.offset = 0;
1369 data.to_addr = to_addr;
1370 data.from_addr = from_addr;
1371 data.to = to;
1372 data.from = from;
1373 data.autinc_to
1374 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1375 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1376 data.autinc_from
1377 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1378 || GET_CODE (from_addr) == POST_INC
1379 || GET_CODE (from_addr) == POST_DEC);
1381 data.explicit_inc_from = 0;
1382 data.explicit_inc_to = 0;
1383 data.reverse
1384 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1385 if (data.reverse) data.offset = len;
1386 data.len = len;
1388 data.to_struct = MEM_IN_STRUCT_P (to);
1389 data.from_struct = MEM_IN_STRUCT_P (from);
1390 data.to_readonly = RTX_UNCHANGING_P (to);
1391 data.from_readonly = RTX_UNCHANGING_P (from);
1393 /* If copying requires more than two move insns,
1394 copy addresses to registers (to make displacements shorter)
1395 and use post-increment if available. */
1396 if (!(data.autinc_from && data.autinc_to)
1397 && move_by_pieces_ninsns (len, align) > 2)
1399 /* Find the mode of the largest move... */
1400 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1401 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1402 if (GET_MODE_SIZE (tmode) < max_size)
1403 mode = tmode;
1405 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1407 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1408 data.autinc_from = 1;
1409 data.explicit_inc_from = -1;
1411 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1413 data.from_addr = copy_addr_to_reg (from_addr);
1414 data.autinc_from = 1;
1415 data.explicit_inc_from = 1;
1417 if (!data.autinc_from && CONSTANT_P (from_addr))
1418 data.from_addr = copy_addr_to_reg (from_addr);
1419 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1421 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1422 data.autinc_to = 1;
1423 data.explicit_inc_to = -1;
1425 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1427 data.to_addr = copy_addr_to_reg (to_addr);
1428 data.autinc_to = 1;
1429 data.explicit_inc_to = 1;
1431 if (!data.autinc_to && CONSTANT_P (to_addr))
1432 data.to_addr = copy_addr_to_reg (to_addr);
1435 if (! SLOW_UNALIGNED_ACCESS
1436 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1437 align = MOVE_MAX;
1439 /* First move what we can in the largest integer mode, then go to
1440 successively smaller modes. */
1442 while (max_size > 1)
1444 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1445 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1446 if (GET_MODE_SIZE (tmode) < max_size)
1447 mode = tmode;
1449 if (mode == VOIDmode)
1450 break;
1452 icode = mov_optab->handlers[(int) mode].insn_code;
1453 if (icode != CODE_FOR_nothing
1454 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1455 GET_MODE_SIZE (mode)))
1456 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1458 max_size = GET_MODE_SIZE (mode);
1461 /* The code above should have handled everything. */
1462 if (data.len > 0)
1463 abort ();
1466 /* Return number of insns required to move L bytes by pieces.
1467 ALIGN (in bytes) is maximum alignment we can assume. */
1469 static int
1470 move_by_pieces_ninsns (l, align)
1471 unsigned int l;
1472 int align;
1474 register int n_insns = 0;
1475 int max_size = MOVE_MAX + 1;
1477 if (! SLOW_UNALIGNED_ACCESS
1478 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1479 align = MOVE_MAX;
1481 while (max_size > 1)
1483 enum machine_mode mode = VOIDmode, tmode;
1484 enum insn_code icode;
1486 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1487 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1488 if (GET_MODE_SIZE (tmode) < max_size)
1489 mode = tmode;
1491 if (mode == VOIDmode)
1492 break;
1494 icode = mov_optab->handlers[(int) mode].insn_code;
1495 if (icode != CODE_FOR_nothing
1496 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1497 GET_MODE_SIZE (mode)))
1498 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1500 max_size = GET_MODE_SIZE (mode);
1503 return n_insns;
1506 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1507 with move instructions for mode MODE. GENFUN is the gen_... function
1508 to make a move insn for that mode. DATA has all the other info. */
1510 static void
1511 move_by_pieces_1 (genfun, mode, data)
1512 rtx (*genfun) PROTO ((rtx, ...));
1513 enum machine_mode mode;
1514 struct move_by_pieces *data;
1516 register int size = GET_MODE_SIZE (mode);
1517 register rtx to1, from1;
1519 while (data->len >= size)
1521 if (data->reverse) data->offset -= size;
1523 to1 = (data->autinc_to
1524 ? gen_rtx_MEM (mode, data->to_addr)
1525 : copy_rtx (change_address (data->to, mode,
1526 plus_constant (data->to_addr,
1527 data->offset))));
1528 MEM_IN_STRUCT_P (to1) = data->to_struct;
1529 RTX_UNCHANGING_P (to1) = data->to_readonly;
1531 from1
1532 = (data->autinc_from
1533 ? gen_rtx_MEM (mode, data->from_addr)
1534 : copy_rtx (change_address (data->from, mode,
1535 plus_constant (data->from_addr,
1536 data->offset))));
1537 MEM_IN_STRUCT_P (from1) = data->from_struct;
1538 RTX_UNCHANGING_P (from1) = data->from_readonly;
1540 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1541 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1542 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1543 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1545 emit_insn ((*genfun) (to1, from1));
1546 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1547 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1548 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1549 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1551 if (! data->reverse) data->offset += size;
1553 data->len -= size;
1557 /* Emit code to move a block Y to a block X.
1558 This may be done with string-move instructions,
1559 with multiple scalar move instructions, or with a library call.
1561 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1562 with mode BLKmode.
1563 SIZE is an rtx that says how long they are.
1564 ALIGN is the maximum alignment we can assume they have,
1565 measured in bytes.
1567 Return the address of the new block, if memcpy is called and returns it,
1568 0 otherwise. */
1571 emit_block_move (x, y, size, align)
1572 rtx x, y;
1573 rtx size;
1574 int align;
1576 rtx retval = 0;
1577 #ifdef TARGET_MEM_FUNCTIONS
1578 static tree fn;
1579 tree call_expr, arg_list;
1580 #endif
1582 if (GET_MODE (x) != BLKmode)
1583 abort ();
1585 if (GET_MODE (y) != BLKmode)
1586 abort ();
1588 x = protect_from_queue (x, 1);
1589 y = protect_from_queue (y, 0);
1590 size = protect_from_queue (size, 0);
1592 if (GET_CODE (x) != MEM)
1593 abort ();
1594 if (GET_CODE (y) != MEM)
1595 abort ();
1596 if (size == 0)
1597 abort ();
1599 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1600 move_by_pieces (x, y, INTVAL (size), align);
1601 else
1603 /* Try the most limited insn first, because there's no point
1604 including more than one in the machine description unless
1605 the more limited one has some advantage. */
1607 rtx opalign = GEN_INT (align);
1608 enum machine_mode mode;
1610 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1611 mode = GET_MODE_WIDER_MODE (mode))
1613 enum insn_code code = movstr_optab[(int) mode];
1614 insn_operand_predicate_fn pred;
1616 if (code != CODE_FOR_nothing
1617 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1618 here because if SIZE is less than the mode mask, as it is
1619 returned by the macro, it will definitely be less than the
1620 actual mode mask. */
1621 && ((GET_CODE (size) == CONST_INT
1622 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1623 <= (GET_MODE_MASK (mode) >> 1)))
1624 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1625 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1626 || (*pred) (x, BLKmode))
1627 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1628 || (*pred) (y, BLKmode))
1629 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1630 || (*pred) (opalign, VOIDmode)))
1632 rtx op2;
1633 rtx last = get_last_insn ();
1634 rtx pat;
1636 op2 = convert_to_mode (mode, size, 1);
1637 pred = insn_data[(int) code].operand[2].predicate;
1638 if (pred != 0 && ! (*pred) (op2, mode))
1639 op2 = copy_to_mode_reg (mode, op2);
1641 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1642 if (pat)
1644 emit_insn (pat);
1645 return 0;
1647 else
1648 delete_insns_since (last);
1652 /* X, Y, or SIZE may have been passed through protect_from_queue.
1654 It is unsafe to save the value generated by protect_from_queue
1655 and reuse it later. Consider what happens if emit_queue is
1656 called before the return value from protect_from_queue is used.
1658 Expansion of the CALL_EXPR below will call emit_queue before
1659 we are finished emitting RTL for argument setup. So if we are
1660 not careful we could get the wrong value for an argument.
1662 To avoid this problem we go ahead and emit code to copy X, Y &
1663 SIZE into new pseudos. We can then place those new pseudos
1664 into an RTL_EXPR and use them later, even after a call to
1665 emit_queue.
1667 Note this is not strictly needed for library calls since they
1668 do not call emit_queue before loading their arguments. However,
1669 we may need to have library calls call emit_queue in the future
1670 since failing to do so could cause problems for targets which
1671 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1672 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1673 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1675 #ifdef TARGET_MEM_FUNCTIONS
1676 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1677 #else
1678 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1679 TREE_UNSIGNED (integer_type_node));
1680 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1681 #endif
1683 #ifdef TARGET_MEM_FUNCTIONS
1684 /* It is incorrect to use the libcall calling conventions to call
1685 memcpy in this context.
1687 This could be a user call to memcpy and the user may wish to
1688 examine the return value from memcpy.
1690 For targets where libcalls and normal calls have different conventions
1691 for returning pointers, we could end up generating incorrect code.
1693 So instead of using a libcall sequence we build up a suitable
1694 CALL_EXPR and expand the call in the normal fashion. */
1695 if (fn == NULL_TREE)
1697 tree fntype;
1699 /* This was copied from except.c, I don't know if all this is
1700 necessary in this context or not. */
1701 fn = get_identifier ("memcpy");
1702 push_obstacks_nochange ();
1703 end_temporary_allocation ();
1704 fntype = build_pointer_type (void_type_node);
1705 fntype = build_function_type (fntype, NULL_TREE);
1706 fn = build_decl (FUNCTION_DECL, fn, fntype);
1707 ggc_add_tree_root (&fn, 1);
1708 DECL_EXTERNAL (fn) = 1;
1709 TREE_PUBLIC (fn) = 1;
1710 DECL_ARTIFICIAL (fn) = 1;
1711 make_decl_rtl (fn, NULL_PTR, 1);
1712 assemble_external (fn);
1713 pop_obstacks ();
1716 /* We need to make an argument list for the function call.
1718 memcpy has three arguments, the first two are void * addresses and
1719 the last is a size_t byte count for the copy. */
1720 arg_list
1721 = build_tree_list (NULL_TREE,
1722 make_tree (build_pointer_type (void_type_node), x));
1723 TREE_CHAIN (arg_list)
1724 = build_tree_list (NULL_TREE,
1725 make_tree (build_pointer_type (void_type_node), y));
1726 TREE_CHAIN (TREE_CHAIN (arg_list))
1727 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1728 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1730 /* Now we have to build up the CALL_EXPR itself. */
1731 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1732 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1733 call_expr, arg_list, NULL_TREE);
1734 TREE_SIDE_EFFECTS (call_expr) = 1;
1736 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1737 #else
1738 emit_library_call (bcopy_libfunc, 0,
1739 VOIDmode, 3, y, Pmode, x, Pmode,
1740 convert_to_mode (TYPE_MODE (integer_type_node), size,
1741 TREE_UNSIGNED (integer_type_node)),
1742 TYPE_MODE (integer_type_node));
1743 #endif
1746 return retval;
1749 /* Copy all or part of a value X into registers starting at REGNO.
1750 The number of registers to be filled is NREGS. */
1752 void
1753 move_block_to_reg (regno, x, nregs, mode)
1754 int regno;
1755 rtx x;
1756 int nregs;
1757 enum machine_mode mode;
1759 int i;
1760 #ifdef HAVE_load_multiple
1761 rtx pat;
1762 rtx last;
1763 #endif
1765 if (nregs == 0)
1766 return;
1768 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1769 x = validize_mem (force_const_mem (mode, x));
1771 /* See if the machine can do this with a load multiple insn. */
1772 #ifdef HAVE_load_multiple
1773 if (HAVE_load_multiple)
1775 last = get_last_insn ();
1776 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1777 GEN_INT (nregs));
1778 if (pat)
1780 emit_insn (pat);
1781 return;
1783 else
1784 delete_insns_since (last);
1786 #endif
1788 for (i = 0; i < nregs; i++)
1789 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1790 operand_subword_force (x, i, mode));
1793 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1794 The number of registers to be filled is NREGS. SIZE indicates the number
1795 of bytes in the object X. */
1798 void
1799 move_block_from_reg (regno, x, nregs, size)
1800 int regno;
1801 rtx x;
1802 int nregs;
1803 int size;
1805 int i;
1806 #ifdef HAVE_store_multiple
1807 rtx pat;
1808 rtx last;
1809 #endif
1810 enum machine_mode mode;
1812 /* If SIZE is that of a mode no bigger than a word, just use that
1813 mode's store operation. */
1814 if (size <= UNITS_PER_WORD
1815 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1817 emit_move_insn (change_address (x, mode, NULL),
1818 gen_rtx_REG (mode, regno));
1819 return;
1822 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1823 to the left before storing to memory. Note that the previous test
1824 doesn't handle all cases (e.g. SIZE == 3). */
1825 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1827 rtx tem = operand_subword (x, 0, 1, BLKmode);
1828 rtx shift;
1830 if (tem == 0)
1831 abort ();
1833 shift = expand_shift (LSHIFT_EXPR, word_mode,
1834 gen_rtx_REG (word_mode, regno),
1835 build_int_2 ((UNITS_PER_WORD - size)
1836 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1837 emit_move_insn (tem, shift);
1838 return;
1841 /* See if the machine can do this with a store multiple insn. */
1842 #ifdef HAVE_store_multiple
1843 if (HAVE_store_multiple)
1845 last = get_last_insn ();
1846 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1847 GEN_INT (nregs));
1848 if (pat)
1850 emit_insn (pat);
1851 return;
1853 else
1854 delete_insns_since (last);
1856 #endif
1858 for (i = 0; i < nregs; i++)
1860 rtx tem = operand_subword (x, i, 1, BLKmode);
1862 if (tem == 0)
1863 abort ();
1865 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1869 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1870 registers represented by a PARALLEL. SSIZE represents the total size of
1871 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1872 SRC in bits. */
1873 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1874 the balance will be in what would be the low-order memory addresses, i.e.
1875 left justified for big endian, right justified for little endian. This
1876 happens to be true for the targets currently using this support. If this
1877 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1878 would be needed. */
1880 void
1881 emit_group_load (dst, orig_src, ssize, align)
1882 rtx dst, orig_src;
1883 int align, ssize;
1885 rtx *tmps, src;
1886 int start, i;
1888 if (GET_CODE (dst) != PARALLEL)
1889 abort ();
1891 /* Check for a NULL entry, used to indicate that the parameter goes
1892 both on the stack and in registers. */
1893 if (XEXP (XVECEXP (dst, 0, 0), 0))
1894 start = 0;
1895 else
1896 start = 1;
1898 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1900 /* If we won't be loading directly from memory, protect the real source
1901 from strange tricks we might play. */
1902 src = orig_src;
1903 if (GET_CODE (src) != MEM)
1905 src = gen_reg_rtx (GET_MODE (orig_src));
1906 emit_move_insn (src, orig_src);
1909 /* Process the pieces. */
1910 for (i = start; i < XVECLEN (dst, 0); i++)
1912 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1913 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1914 int bytelen = GET_MODE_SIZE (mode);
1915 int shift = 0;
1917 /* Handle trailing fragments that run over the size of the struct. */
1918 if (ssize >= 0 && bytepos + bytelen > ssize)
1920 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1921 bytelen = ssize - bytepos;
1922 if (bytelen <= 0)
1923 abort();
1926 /* Optimize the access just a bit. */
1927 if (GET_CODE (src) == MEM
1928 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1929 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1930 && bytelen == GET_MODE_SIZE (mode))
1932 tmps[i] = gen_reg_rtx (mode);
1933 emit_move_insn (tmps[i],
1934 change_address (src, mode,
1935 plus_constant (XEXP (src, 0),
1936 bytepos)));
1938 else if (GET_CODE (src) == CONCAT)
1940 if (bytepos == 0
1941 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1942 tmps[i] = XEXP (src, 0);
1943 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1944 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1945 tmps[i] = XEXP (src, 1);
1946 else
1947 abort ();
1949 else
1951 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1952 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1953 mode, mode, align, ssize);
1956 if (BYTES_BIG_ENDIAN && shift)
1958 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1959 tmps[i], 0, OPTAB_WIDEN);
1962 emit_queue();
1964 /* Copy the extracted pieces into the proper (probable) hard regs. */
1965 for (i = start; i < XVECLEN (dst, 0); i++)
1966 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1969 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1970 registers represented by a PARALLEL. SSIZE represents the total size of
1971 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1973 void
1974 emit_group_store (orig_dst, src, ssize, align)
1975 rtx orig_dst, src;
1976 int ssize, align;
1978 rtx *tmps, dst;
1979 int start, i;
1981 if (GET_CODE (src) != PARALLEL)
1982 abort ();
1984 /* Check for a NULL entry, used to indicate that the parameter goes
1985 both on the stack and in registers. */
1986 if (XEXP (XVECEXP (src, 0, 0), 0))
1987 start = 0;
1988 else
1989 start = 1;
1991 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
1993 /* Copy the (probable) hard regs into pseudos. */
1994 for (i = start; i < XVECLEN (src, 0); i++)
1996 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1997 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1998 emit_move_insn (tmps[i], reg);
2000 emit_queue();
2002 /* If we won't be storing directly into memory, protect the real destination
2003 from strange tricks we might play. */
2004 dst = orig_dst;
2005 if (GET_CODE (dst) == PARALLEL)
2007 rtx temp;
2009 /* We can get a PARALLEL dst if there is a conditional expression in
2010 a return statement. In that case, the dst and src are the same,
2011 so no action is necessary. */
2012 if (rtx_equal_p (dst, src))
2013 return;
2015 /* It is unclear if we can ever reach here, but we may as well handle
2016 it. Allocate a temporary, and split this into a store/load to/from
2017 the temporary. */
2019 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2020 emit_group_store (temp, src, ssize, align);
2021 emit_group_load (dst, temp, ssize, align);
2022 return;
2024 else if (GET_CODE (dst) != MEM)
2026 dst = gen_reg_rtx (GET_MODE (orig_dst));
2027 /* Make life a bit easier for combine. */
2028 emit_move_insn (dst, const0_rtx);
2030 else if (! MEM_IN_STRUCT_P (dst))
2032 /* store_bit_field requires that memory operations have
2033 mem_in_struct_p set; we might not. */
2035 dst = copy_rtx (orig_dst);
2036 MEM_SET_IN_STRUCT_P (dst, 1);
2039 /* Process the pieces. */
2040 for (i = start; i < XVECLEN (src, 0); i++)
2042 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2043 enum machine_mode mode = GET_MODE (tmps[i]);
2044 int bytelen = GET_MODE_SIZE (mode);
2046 /* Handle trailing fragments that run over the size of the struct. */
2047 if (ssize >= 0 && bytepos + bytelen > ssize)
2049 if (BYTES_BIG_ENDIAN)
2051 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2052 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2053 tmps[i], 0, OPTAB_WIDEN);
2055 bytelen = ssize - bytepos;
2058 /* Optimize the access just a bit. */
2059 if (GET_CODE (dst) == MEM
2060 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2061 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2062 && bytelen == GET_MODE_SIZE (mode))
2064 emit_move_insn (change_address (dst, mode,
2065 plus_constant (XEXP (dst, 0),
2066 bytepos)),
2067 tmps[i]);
2069 else
2071 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2072 mode, tmps[i], align, ssize);
2075 emit_queue();
2077 /* Copy from the pseudo into the (probable) hard reg. */
2078 if (GET_CODE (dst) == REG)
2079 emit_move_insn (orig_dst, dst);
2082 /* Generate code to copy a BLKmode object of TYPE out of a
2083 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2084 is null, a stack temporary is created. TGTBLK is returned.
2086 The primary purpose of this routine is to handle functions
2087 that return BLKmode structures in registers. Some machines
2088 (the PA for example) want to return all small structures
2089 in registers regardless of the structure's alignment.
2093 copy_blkmode_from_reg(tgtblk,srcreg,type)
2094 rtx tgtblk;
2095 rtx srcreg;
2096 tree type;
2098 int bytes = int_size_in_bytes (type);
2099 rtx src = NULL, dst = NULL;
2100 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2101 int bitpos, xbitpos, big_endian_correction = 0;
2103 if (tgtblk == 0)
2105 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2106 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2107 preserve_temp_slots (tgtblk);
2110 /* This code assumes srcreg is at least a full word. If it isn't,
2111 copy it into a new pseudo which is a full word. */
2112 if (GET_MODE (srcreg) != BLKmode
2113 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2114 srcreg = convert_to_mode (word_mode, srcreg,
2115 TREE_UNSIGNED (type));
2117 /* Structures whose size is not a multiple of a word are aligned
2118 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2119 machine, this means we must skip the empty high order bytes when
2120 calculating the bit offset. */
2121 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2122 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2123 * BITS_PER_UNIT));
2125 /* Copy the structure BITSIZE bites at a time.
2127 We could probably emit more efficient code for machines
2128 which do not use strict alignment, but it doesn't seem
2129 worth the effort at the current time. */
2130 for (bitpos = 0, xbitpos = big_endian_correction;
2131 bitpos < bytes * BITS_PER_UNIT;
2132 bitpos += bitsize, xbitpos += bitsize)
2135 /* We need a new source operand each time xbitpos is on a
2136 word boundary and when xbitpos == big_endian_correction
2137 (the first time through). */
2138 if (xbitpos % BITS_PER_WORD == 0
2139 || xbitpos == big_endian_correction)
2140 src = operand_subword_force (srcreg,
2141 xbitpos / BITS_PER_WORD,
2142 BLKmode);
2144 /* We need a new destination operand each time bitpos is on
2145 a word boundary. */
2146 if (bitpos % BITS_PER_WORD == 0)
2147 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2149 /* Use xbitpos for the source extraction (right justified) and
2150 xbitpos for the destination store (left justified). */
2151 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2152 extract_bit_field (src, bitsize,
2153 xbitpos % BITS_PER_WORD, 1,
2154 NULL_RTX, word_mode,
2155 word_mode,
2156 bitsize / BITS_PER_UNIT,
2157 BITS_PER_WORD),
2158 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2160 return tgtblk;
2164 /* Add a USE expression for REG to the (possibly empty) list pointed
2165 to by CALL_FUSAGE. REG must denote a hard register. */
2167 void
2168 use_reg (call_fusage, reg)
2169 rtx *call_fusage, reg;
2171 if (GET_CODE (reg) != REG
2172 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2173 abort();
2175 *call_fusage
2176 = gen_rtx_EXPR_LIST (VOIDmode,
2177 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2180 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2181 starting at REGNO. All of these registers must be hard registers. */
2183 void
2184 use_regs (call_fusage, regno, nregs)
2185 rtx *call_fusage;
2186 int regno;
2187 int nregs;
2189 int i;
2191 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2192 abort ();
2194 for (i = 0; i < nregs; i++)
2195 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2198 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2199 PARALLEL REGS. This is for calls that pass values in multiple
2200 non-contiguous locations. The Irix 6 ABI has examples of this. */
2202 void
2203 use_group_regs (call_fusage, regs)
2204 rtx *call_fusage;
2205 rtx regs;
2207 int i;
2209 for (i = 0; i < XVECLEN (regs, 0); i++)
2211 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2213 /* A NULL entry means the parameter goes both on the stack and in
2214 registers. This can also be a MEM for targets that pass values
2215 partially on the stack and partially in registers. */
2216 if (reg != 0 && GET_CODE (reg) == REG)
2217 use_reg (call_fusage, reg);
2221 /* Generate several move instructions to clear LEN bytes of block TO.
2222 (A MEM rtx with BLKmode). The caller must pass TO through
2223 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2224 we can assume. */
2226 static void
2227 clear_by_pieces (to, len, align)
2228 rtx to;
2229 int len, align;
2231 struct clear_by_pieces data;
2232 rtx to_addr = XEXP (to, 0);
2233 int max_size = MOVE_MAX_PIECES + 1;
2234 enum machine_mode mode = VOIDmode, tmode;
2235 enum insn_code icode;
2237 data.offset = 0;
2238 data.to_addr = to_addr;
2239 data.to = to;
2240 data.autinc_to
2241 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2242 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2244 data.explicit_inc_to = 0;
2245 data.reverse
2246 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2247 if (data.reverse) data.offset = len;
2248 data.len = len;
2250 data.to_struct = MEM_IN_STRUCT_P (to);
2252 /* If copying requires more than two move insns,
2253 copy addresses to registers (to make displacements shorter)
2254 and use post-increment if available. */
2255 if (!data.autinc_to
2256 && move_by_pieces_ninsns (len, align) > 2)
2258 /* Determine the main mode we'll be using */
2259 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2260 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2261 if (GET_MODE_SIZE (tmode) < max_size)
2262 mode = tmode;
2264 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2266 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2267 data.autinc_to = 1;
2268 data.explicit_inc_to = -1;
2270 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2272 data.to_addr = copy_addr_to_reg (to_addr);
2273 data.autinc_to = 1;
2274 data.explicit_inc_to = 1;
2276 if (!data.autinc_to && CONSTANT_P (to_addr))
2277 data.to_addr = copy_addr_to_reg (to_addr);
2280 if (! SLOW_UNALIGNED_ACCESS
2281 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2282 align = MOVE_MAX;
2284 /* First move what we can in the largest integer mode, then go to
2285 successively smaller modes. */
2287 while (max_size > 1)
2289 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2290 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2291 if (GET_MODE_SIZE (tmode) < max_size)
2292 mode = tmode;
2294 if (mode == VOIDmode)
2295 break;
2297 icode = mov_optab->handlers[(int) mode].insn_code;
2298 if (icode != CODE_FOR_nothing
2299 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2300 GET_MODE_SIZE (mode)))
2301 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2303 max_size = GET_MODE_SIZE (mode);
2306 /* The code above should have handled everything. */
2307 if (data.len != 0)
2308 abort ();
2311 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2312 with move instructions for mode MODE. GENFUN is the gen_... function
2313 to make a move insn for that mode. DATA has all the other info. */
2315 static void
2316 clear_by_pieces_1 (genfun, mode, data)
2317 rtx (*genfun) PROTO ((rtx, ...));
2318 enum machine_mode mode;
2319 struct clear_by_pieces *data;
2321 register int size = GET_MODE_SIZE (mode);
2322 register rtx to1;
2324 while (data->len >= size)
2326 if (data->reverse) data->offset -= size;
2328 to1 = (data->autinc_to
2329 ? gen_rtx_MEM (mode, data->to_addr)
2330 : copy_rtx (change_address (data->to, mode,
2331 plus_constant (data->to_addr,
2332 data->offset))));
2333 MEM_IN_STRUCT_P (to1) = data->to_struct;
2335 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2336 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2338 emit_insn ((*genfun) (to1, const0_rtx));
2339 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2340 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2342 if (! data->reverse) data->offset += size;
2344 data->len -= size;
2348 /* Write zeros through the storage of OBJECT.
2349 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2350 the maximum alignment we can is has, measured in bytes.
2352 If we call a function that returns the length of the block, return it. */
2355 clear_storage (object, size, align)
2356 rtx object;
2357 rtx size;
2358 int align;
2360 #ifdef TARGET_MEM_FUNCTIONS
2361 static tree fn;
2362 tree call_expr, arg_list;
2363 #endif
2364 rtx retval = 0;
2366 if (GET_MODE (object) == BLKmode)
2368 object = protect_from_queue (object, 1);
2369 size = protect_from_queue (size, 0);
2371 if (GET_CODE (size) == CONST_INT
2372 && MOVE_BY_PIECES_P (INTVAL (size), align))
2373 clear_by_pieces (object, INTVAL (size), align);
2375 else
2377 /* Try the most limited insn first, because there's no point
2378 including more than one in the machine description unless
2379 the more limited one has some advantage. */
2381 rtx opalign = GEN_INT (align);
2382 enum machine_mode mode;
2384 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2385 mode = GET_MODE_WIDER_MODE (mode))
2387 enum insn_code code = clrstr_optab[(int) mode];
2388 insn_operand_predicate_fn pred;
2390 if (code != CODE_FOR_nothing
2391 /* We don't need MODE to be narrower than
2392 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2393 the mode mask, as it is returned by the macro, it will
2394 definitely be less than the actual mode mask. */
2395 && ((GET_CODE (size) == CONST_INT
2396 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2397 <= (GET_MODE_MASK (mode) >> 1)))
2398 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2399 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2400 || (*pred) (object, BLKmode))
2401 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2402 || (*pred) (opalign, VOIDmode)))
2404 rtx op1;
2405 rtx last = get_last_insn ();
2406 rtx pat;
2408 op1 = convert_to_mode (mode, size, 1);
2409 pred = insn_data[(int) code].operand[1].predicate;
2410 if (pred != 0 && ! (*pred) (op1, mode))
2411 op1 = copy_to_mode_reg (mode, op1);
2413 pat = GEN_FCN ((int) code) (object, op1, opalign);
2414 if (pat)
2416 emit_insn (pat);
2417 return 0;
2419 else
2420 delete_insns_since (last);
2424 /* OBJECT or SIZE may have been passed through protect_from_queue.
2426 It is unsafe to save the value generated by protect_from_queue
2427 and reuse it later. Consider what happens if emit_queue is
2428 called before the return value from protect_from_queue is used.
2430 Expansion of the CALL_EXPR below will call emit_queue before
2431 we are finished emitting RTL for argument setup. So if we are
2432 not careful we could get the wrong value for an argument.
2434 To avoid this problem we go ahead and emit code to copy OBJECT
2435 and SIZE into new pseudos. We can then place those new pseudos
2436 into an RTL_EXPR and use them later, even after a call to
2437 emit_queue.
2439 Note this is not strictly needed for library calls since they
2440 do not call emit_queue before loading their arguments. However,
2441 we may need to have library calls call emit_queue in the future
2442 since failing to do so could cause problems for targets which
2443 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2444 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2446 #ifdef TARGET_MEM_FUNCTIONS
2447 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2448 #else
2449 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2450 TREE_UNSIGNED (integer_type_node));
2451 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2452 #endif
2455 #ifdef TARGET_MEM_FUNCTIONS
2456 /* It is incorrect to use the libcall calling conventions to call
2457 memset in this context.
2459 This could be a user call to memset and the user may wish to
2460 examine the return value from memset.
2462 For targets where libcalls and normal calls have different
2463 conventions for returning pointers, we could end up generating
2464 incorrect code.
2466 So instead of using a libcall sequence we build up a suitable
2467 CALL_EXPR and expand the call in the normal fashion. */
2468 if (fn == NULL_TREE)
2470 tree fntype;
2472 /* This was copied from except.c, I don't know if all this is
2473 necessary in this context or not. */
2474 fn = get_identifier ("memset");
2475 push_obstacks_nochange ();
2476 end_temporary_allocation ();
2477 fntype = build_pointer_type (void_type_node);
2478 fntype = build_function_type (fntype, NULL_TREE);
2479 fn = build_decl (FUNCTION_DECL, fn, fntype);
2480 ggc_add_tree_root (&fn, 1);
2481 DECL_EXTERNAL (fn) = 1;
2482 TREE_PUBLIC (fn) = 1;
2483 DECL_ARTIFICIAL (fn) = 1;
2484 make_decl_rtl (fn, NULL_PTR, 1);
2485 assemble_external (fn);
2486 pop_obstacks ();
2489 /* We need to make an argument list for the function call.
2491 memset has three arguments, the first is a void * addresses, the
2492 second a integer with the initialization value, the last is a
2493 size_t byte count for the copy. */
2494 arg_list
2495 = build_tree_list (NULL_TREE,
2496 make_tree (build_pointer_type (void_type_node),
2497 object));
2498 TREE_CHAIN (arg_list)
2499 = build_tree_list (NULL_TREE,
2500 make_tree (integer_type_node, const0_rtx));
2501 TREE_CHAIN (TREE_CHAIN (arg_list))
2502 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2503 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2505 /* Now we have to build up the CALL_EXPR itself. */
2506 call_expr = build1 (ADDR_EXPR,
2507 build_pointer_type (TREE_TYPE (fn)), fn);
2508 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2509 call_expr, arg_list, NULL_TREE);
2510 TREE_SIDE_EFFECTS (call_expr) = 1;
2512 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2513 #else
2514 emit_library_call (bzero_libfunc, 0,
2515 VOIDmode, 2, object, Pmode, size,
2516 TYPE_MODE (integer_type_node));
2517 #endif
2520 else
2521 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2523 return retval;
2526 /* Generate code to copy Y into X.
2527 Both Y and X must have the same mode, except that
2528 Y can be a constant with VOIDmode.
2529 This mode cannot be BLKmode; use emit_block_move for that.
2531 Return the last instruction emitted. */
2534 emit_move_insn (x, y)
2535 rtx x, y;
2537 enum machine_mode mode = GET_MODE (x);
2539 x = protect_from_queue (x, 1);
2540 y = protect_from_queue (y, 0);
2542 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2543 abort ();
2545 /* Never force constant_p_rtx to memory. */
2546 if (GET_CODE (y) == CONSTANT_P_RTX)
2548 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2549 y = force_const_mem (mode, y);
2551 /* If X or Y are memory references, verify that their addresses are valid
2552 for the machine. */
2553 if (GET_CODE (x) == MEM
2554 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2555 && ! push_operand (x, GET_MODE (x)))
2556 || (flag_force_addr
2557 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2558 x = change_address (x, VOIDmode, XEXP (x, 0));
2560 if (GET_CODE (y) == MEM
2561 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2562 || (flag_force_addr
2563 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2564 y = change_address (y, VOIDmode, XEXP (y, 0));
2566 if (mode == BLKmode)
2567 abort ();
2569 return emit_move_insn_1 (x, y);
2572 /* Low level part of emit_move_insn.
2573 Called just like emit_move_insn, but assumes X and Y
2574 are basically valid. */
2577 emit_move_insn_1 (x, y)
2578 rtx x, y;
2580 enum machine_mode mode = GET_MODE (x);
2581 enum machine_mode submode;
2582 enum mode_class class = GET_MODE_CLASS (mode);
2583 int i;
2585 if (mode >= MAX_MACHINE_MODE)
2586 abort ();
2588 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2589 return
2590 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2592 /* Expand complex moves by moving real part and imag part, if possible. */
2593 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2594 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2595 * BITS_PER_UNIT),
2596 (class == MODE_COMPLEX_INT
2597 ? MODE_INT : MODE_FLOAT),
2599 && (mov_optab->handlers[(int) submode].insn_code
2600 != CODE_FOR_nothing))
2602 /* Don't split destination if it is a stack push. */
2603 int stack = push_operand (x, GET_MODE (x));
2605 /* If this is a stack, push the highpart first, so it
2606 will be in the argument order.
2608 In that case, change_address is used only to convert
2609 the mode, not to change the address. */
2610 if (stack)
2612 /* Note that the real part always precedes the imag part in memory
2613 regardless of machine's endianness. */
2614 #ifdef STACK_GROWS_DOWNWARD
2615 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2616 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2617 gen_imagpart (submode, y)));
2618 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2619 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2620 gen_realpart (submode, y)));
2621 #else
2622 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2623 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2624 gen_realpart (submode, y)));
2625 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2626 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2627 gen_imagpart (submode, y)));
2628 #endif
2630 else
2632 /* Show the output dies here. This is necessary for pseudos;
2633 hard regs shouldn't appear here except as return values.
2634 We never want to emit such a clobber after reload. */
2635 if (x != y
2636 && ! (reload_in_progress || reload_completed))
2638 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2641 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2642 (gen_realpart (submode, x), gen_realpart (submode, y)));
2643 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2644 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2647 return get_last_insn ();
2650 /* This will handle any multi-word mode that lacks a move_insn pattern.
2651 However, you will get better code if you define such patterns,
2652 even if they must turn into multiple assembler instructions. */
2653 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2655 rtx last_insn = 0;
2657 #ifdef PUSH_ROUNDING
2659 /* If X is a push on the stack, do the push now and replace
2660 X with a reference to the stack pointer. */
2661 if (push_operand (x, GET_MODE (x)))
2663 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2664 x = change_address (x, VOIDmode, stack_pointer_rtx);
2666 #endif
2668 /* Show the output dies here. This is necessary for pseudos;
2669 hard regs shouldn't appear here except as return values.
2670 We never want to emit such a clobber after reload. */
2671 if (x != y
2672 && ! (reload_in_progress || reload_completed))
2674 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2677 for (i = 0;
2678 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2679 i++)
2681 rtx xpart = operand_subword (x, i, 1, mode);
2682 rtx ypart = operand_subword (y, i, 1, mode);
2684 /* If we can't get a part of Y, put Y into memory if it is a
2685 constant. Otherwise, force it into a register. If we still
2686 can't get a part of Y, abort. */
2687 if (ypart == 0 && CONSTANT_P (y))
2689 y = force_const_mem (mode, y);
2690 ypart = operand_subword (y, i, 1, mode);
2692 else if (ypart == 0)
2693 ypart = operand_subword_force (y, i, mode);
2695 if (xpart == 0 || ypart == 0)
2696 abort ();
2698 last_insn = emit_move_insn (xpart, ypart);
2701 return last_insn;
2703 else
2704 abort ();
2707 /* Pushing data onto the stack. */
2709 /* Push a block of length SIZE (perhaps variable)
2710 and return an rtx to address the beginning of the block.
2711 Note that it is not possible for the value returned to be a QUEUED.
2712 The value may be virtual_outgoing_args_rtx.
2714 EXTRA is the number of bytes of padding to push in addition to SIZE.
2715 BELOW nonzero means this padding comes at low addresses;
2716 otherwise, the padding comes at high addresses. */
2719 push_block (size, extra, below)
2720 rtx size;
2721 int extra, below;
2723 register rtx temp;
2725 size = convert_modes (Pmode, ptr_mode, size, 1);
2726 if (CONSTANT_P (size))
2727 anti_adjust_stack (plus_constant (size, extra));
2728 else if (GET_CODE (size) == REG && extra == 0)
2729 anti_adjust_stack (size);
2730 else
2732 rtx temp = copy_to_mode_reg (Pmode, size);
2733 if (extra != 0)
2734 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2735 temp, 0, OPTAB_LIB_WIDEN);
2736 anti_adjust_stack (temp);
2739 #if defined (STACK_GROWS_DOWNWARD) \
2740 || (defined (ARGS_GROW_DOWNWARD) \
2741 && !defined (ACCUMULATE_OUTGOING_ARGS))
2743 /* Return the lowest stack address when STACK or ARGS grow downward and
2744 we are not aaccumulating outgoing arguments (the c4x port uses such
2745 conventions). */
2746 temp = virtual_outgoing_args_rtx;
2747 if (extra != 0 && below)
2748 temp = plus_constant (temp, extra);
2749 #else
2750 if (GET_CODE (size) == CONST_INT)
2751 temp = plus_constant (virtual_outgoing_args_rtx,
2752 - INTVAL (size) - (below ? 0 : extra));
2753 else if (extra != 0 && !below)
2754 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2755 negate_rtx (Pmode, plus_constant (size, extra)));
2756 else
2757 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2758 negate_rtx (Pmode, size));
2759 #endif
2761 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2765 gen_push_operand ()
2767 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2770 /* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2771 block of SIZE bytes. */
2773 static rtx
2774 get_push_address (size)
2775 int size;
2777 register rtx temp;
2779 if (STACK_PUSH_CODE == POST_DEC)
2780 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2781 else if (STACK_PUSH_CODE == POST_INC)
2782 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2783 else
2784 temp = stack_pointer_rtx;
2786 return copy_to_reg (temp);
2789 /* Generate code to push X onto the stack, assuming it has mode MODE and
2790 type TYPE.
2791 MODE is redundant except when X is a CONST_INT (since they don't
2792 carry mode info).
2793 SIZE is an rtx for the size of data to be copied (in bytes),
2794 needed only if X is BLKmode.
2796 ALIGN (in bytes) is maximum alignment we can assume.
2798 If PARTIAL and REG are both nonzero, then copy that many of the first
2799 words of X into registers starting with REG, and push the rest of X.
2800 The amount of space pushed is decreased by PARTIAL words,
2801 rounded *down* to a multiple of PARM_BOUNDARY.
2802 REG must be a hard register in this case.
2803 If REG is zero but PARTIAL is not, take any all others actions for an
2804 argument partially in registers, but do not actually load any
2805 registers.
2807 EXTRA is the amount in bytes of extra space to leave next to this arg.
2808 This is ignored if an argument block has already been allocated.
2810 On a machine that lacks real push insns, ARGS_ADDR is the address of
2811 the bottom of the argument block for this call. We use indexing off there
2812 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2813 argument block has not been preallocated.
2815 ARGS_SO_FAR is the size of args previously pushed for this call.
2817 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2818 for arguments passed in registers. If nonzero, it will be the number
2819 of bytes required. */
2821 void
2822 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2823 args_addr, args_so_far, reg_parm_stack_space)
2824 register rtx x;
2825 enum machine_mode mode;
2826 tree type;
2827 rtx size;
2828 int align;
2829 int partial;
2830 rtx reg;
2831 int extra;
2832 rtx args_addr;
2833 rtx args_so_far;
2834 int reg_parm_stack_space;
2836 rtx xinner;
2837 enum direction stack_direction
2838 #ifdef STACK_GROWS_DOWNWARD
2839 = downward;
2840 #else
2841 = upward;
2842 #endif
2844 /* Decide where to pad the argument: `downward' for below,
2845 `upward' for above, or `none' for don't pad it.
2846 Default is below for small data on big-endian machines; else above. */
2847 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2849 /* Invert direction if stack is post-update. */
2850 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2851 if (where_pad != none)
2852 where_pad = (where_pad == downward ? upward : downward);
2854 xinner = x = protect_from_queue (x, 0);
2856 if (mode == BLKmode)
2858 /* Copy a block into the stack, entirely or partially. */
2860 register rtx temp;
2861 int used = partial * UNITS_PER_WORD;
2862 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2863 int skip;
2865 if (size == 0)
2866 abort ();
2868 used -= offset;
2870 /* USED is now the # of bytes we need not copy to the stack
2871 because registers will take care of them. */
2873 if (partial != 0)
2874 xinner = change_address (xinner, BLKmode,
2875 plus_constant (XEXP (xinner, 0), used));
2877 /* If the partial register-part of the arg counts in its stack size,
2878 skip the part of stack space corresponding to the registers.
2879 Otherwise, start copying to the beginning of the stack space,
2880 by setting SKIP to 0. */
2881 skip = (reg_parm_stack_space == 0) ? 0 : used;
2883 #ifdef PUSH_ROUNDING
2884 /* Do it with several push insns if that doesn't take lots of insns
2885 and if there is no difficulty with push insns that skip bytes
2886 on the stack for alignment purposes. */
2887 if (args_addr == 0
2888 && GET_CODE (size) == CONST_INT
2889 && skip == 0
2890 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2891 /* Here we avoid the case of a structure whose weak alignment
2892 forces many pushes of a small amount of data,
2893 and such small pushes do rounding that causes trouble. */
2894 && ((! SLOW_UNALIGNED_ACCESS)
2895 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2896 || PUSH_ROUNDING (align) == align)
2897 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2899 /* Push padding now if padding above and stack grows down,
2900 or if padding below and stack grows up.
2901 But if space already allocated, this has already been done. */
2902 if (extra && args_addr == 0
2903 && where_pad != none && where_pad != stack_direction)
2904 anti_adjust_stack (GEN_INT (extra));
2906 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2907 INTVAL (size) - used, align);
2909 if (current_function_check_memory_usage && ! in_check_memory_usage)
2911 rtx temp;
2913 in_check_memory_usage = 1;
2914 temp = get_push_address (INTVAL(size) - used);
2915 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2916 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2917 temp, Pmode,
2918 XEXP (xinner, 0), Pmode,
2919 GEN_INT (INTVAL(size) - used),
2920 TYPE_MODE (sizetype));
2921 else
2922 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2923 temp, Pmode,
2924 GEN_INT (INTVAL(size) - used),
2925 TYPE_MODE (sizetype),
2926 GEN_INT (MEMORY_USE_RW),
2927 TYPE_MODE (integer_type_node));
2928 in_check_memory_usage = 0;
2931 else
2932 #endif /* PUSH_ROUNDING */
2934 /* Otherwise make space on the stack and copy the data
2935 to the address of that space. */
2937 /* Deduct words put into registers from the size we must copy. */
2938 if (partial != 0)
2940 if (GET_CODE (size) == CONST_INT)
2941 size = GEN_INT (INTVAL (size) - used);
2942 else
2943 size = expand_binop (GET_MODE (size), sub_optab, size,
2944 GEN_INT (used), NULL_RTX, 0,
2945 OPTAB_LIB_WIDEN);
2948 /* Get the address of the stack space.
2949 In this case, we do not deal with EXTRA separately.
2950 A single stack adjust will do. */
2951 if (! args_addr)
2953 temp = push_block (size, extra, where_pad == downward);
2954 extra = 0;
2956 else if (GET_CODE (args_so_far) == CONST_INT)
2957 temp = memory_address (BLKmode,
2958 plus_constant (args_addr,
2959 skip + INTVAL (args_so_far)));
2960 else
2961 temp = memory_address (BLKmode,
2962 plus_constant (gen_rtx_PLUS (Pmode,
2963 args_addr,
2964 args_so_far),
2965 skip));
2966 if (current_function_check_memory_usage && ! in_check_memory_usage)
2968 rtx target;
2970 in_check_memory_usage = 1;
2971 target = copy_to_reg (temp);
2972 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2973 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2974 target, Pmode,
2975 XEXP (xinner, 0), Pmode,
2976 size, TYPE_MODE (sizetype));
2977 else
2978 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2979 target, Pmode,
2980 size, TYPE_MODE (sizetype),
2981 GEN_INT (MEMORY_USE_RW),
2982 TYPE_MODE (integer_type_node));
2983 in_check_memory_usage = 0;
2986 /* TEMP is the address of the block. Copy the data there. */
2987 if (GET_CODE (size) == CONST_INT
2988 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
2990 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2991 INTVAL (size), align);
2992 goto ret;
2994 else
2996 rtx opalign = GEN_INT (align);
2997 enum machine_mode mode;
2998 rtx target = gen_rtx_MEM (BLKmode, temp);
3000 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3001 mode != VOIDmode;
3002 mode = GET_MODE_WIDER_MODE (mode))
3004 enum insn_code code = movstr_optab[(int) mode];
3005 insn_operand_predicate_fn pred;
3007 if (code != CODE_FOR_nothing
3008 && ((GET_CODE (size) == CONST_INT
3009 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3010 <= (GET_MODE_MASK (mode) >> 1)))
3011 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3012 && (!(pred = insn_data[(int) code].operand[0].predicate)
3013 || ((*pred) (target, BLKmode)))
3014 && (!(pred = insn_data[(int) code].operand[1].predicate)
3015 || ((*pred) (xinner, BLKmode)))
3016 && (!(pred = insn_data[(int) code].operand[3].predicate)
3017 || ((*pred) (opalign, VOIDmode))))
3019 rtx op2 = convert_to_mode (mode, size, 1);
3020 rtx last = get_last_insn ();
3021 rtx pat;
3023 pred = insn_data[(int) code].operand[2].predicate;
3024 if (pred != 0 && ! (*pred) (op2, mode))
3025 op2 = copy_to_mode_reg (mode, op2);
3027 pat = GEN_FCN ((int) code) (target, xinner,
3028 op2, opalign);
3029 if (pat)
3031 emit_insn (pat);
3032 goto ret;
3034 else
3035 delete_insns_since (last);
3040 #ifndef ACCUMULATE_OUTGOING_ARGS
3041 /* If the source is referenced relative to the stack pointer,
3042 copy it to another register to stabilize it. We do not need
3043 to do this if we know that we won't be changing sp. */
3045 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3046 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3047 temp = copy_to_reg (temp);
3048 #endif
3050 /* Make inhibit_defer_pop nonzero around the library call
3051 to force it to pop the bcopy-arguments right away. */
3052 NO_DEFER_POP;
3053 #ifdef TARGET_MEM_FUNCTIONS
3054 emit_library_call (memcpy_libfunc, 0,
3055 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3056 convert_to_mode (TYPE_MODE (sizetype),
3057 size, TREE_UNSIGNED (sizetype)),
3058 TYPE_MODE (sizetype));
3059 #else
3060 emit_library_call (bcopy_libfunc, 0,
3061 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3062 convert_to_mode (TYPE_MODE (integer_type_node),
3063 size,
3064 TREE_UNSIGNED (integer_type_node)),
3065 TYPE_MODE (integer_type_node));
3066 #endif
3067 OK_DEFER_POP;
3070 else if (partial > 0)
3072 /* Scalar partly in registers. */
3074 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3075 int i;
3076 int not_stack;
3077 /* # words of start of argument
3078 that we must make space for but need not store. */
3079 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3080 int args_offset = INTVAL (args_so_far);
3081 int skip;
3083 /* Push padding now if padding above and stack grows down,
3084 or if padding below and stack grows up.
3085 But if space already allocated, this has already been done. */
3086 if (extra && args_addr == 0
3087 && where_pad != none && where_pad != stack_direction)
3088 anti_adjust_stack (GEN_INT (extra));
3090 /* If we make space by pushing it, we might as well push
3091 the real data. Otherwise, we can leave OFFSET nonzero
3092 and leave the space uninitialized. */
3093 if (args_addr == 0)
3094 offset = 0;
3096 /* Now NOT_STACK gets the number of words that we don't need to
3097 allocate on the stack. */
3098 not_stack = partial - offset;
3100 /* If the partial register-part of the arg counts in its stack size,
3101 skip the part of stack space corresponding to the registers.
3102 Otherwise, start copying to the beginning of the stack space,
3103 by setting SKIP to 0. */
3104 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3106 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3107 x = validize_mem (force_const_mem (mode, x));
3109 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3110 SUBREGs of such registers are not allowed. */
3111 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3112 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3113 x = copy_to_reg (x);
3115 /* Loop over all the words allocated on the stack for this arg. */
3116 /* We can do it by words, because any scalar bigger than a word
3117 has a size a multiple of a word. */
3118 #ifndef PUSH_ARGS_REVERSED
3119 for (i = not_stack; i < size; i++)
3120 #else
3121 for (i = size - 1; i >= not_stack; i--)
3122 #endif
3123 if (i >= not_stack + offset)
3124 emit_push_insn (operand_subword_force (x, i, mode),
3125 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3126 0, args_addr,
3127 GEN_INT (args_offset + ((i - not_stack + skip)
3128 * UNITS_PER_WORD)),
3129 reg_parm_stack_space);
3131 else
3133 rtx addr;
3134 rtx target = NULL_RTX;
3136 /* Push padding now if padding above and stack grows down,
3137 or if padding below and stack grows up.
3138 But if space already allocated, this has already been done. */
3139 if (extra && args_addr == 0
3140 && where_pad != none && where_pad != stack_direction)
3141 anti_adjust_stack (GEN_INT (extra));
3143 #ifdef PUSH_ROUNDING
3144 if (args_addr == 0)
3145 addr = gen_push_operand ();
3146 else
3147 #endif
3149 if (GET_CODE (args_so_far) == CONST_INT)
3150 addr
3151 = memory_address (mode,
3152 plus_constant (args_addr,
3153 INTVAL (args_so_far)));
3154 else
3155 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3156 args_so_far));
3157 target = addr;
3160 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3162 if (current_function_check_memory_usage && ! in_check_memory_usage)
3164 in_check_memory_usage = 1;
3165 if (target == 0)
3166 target = get_push_address (GET_MODE_SIZE (mode));
3168 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3169 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3170 target, Pmode,
3171 XEXP (x, 0), Pmode,
3172 GEN_INT (GET_MODE_SIZE (mode)),
3173 TYPE_MODE (sizetype));
3174 else
3175 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3176 target, Pmode,
3177 GEN_INT (GET_MODE_SIZE (mode)),
3178 TYPE_MODE (sizetype),
3179 GEN_INT (MEMORY_USE_RW),
3180 TYPE_MODE (integer_type_node));
3181 in_check_memory_usage = 0;
3185 ret:
3186 /* If part should go in registers, copy that part
3187 into the appropriate registers. Do this now, at the end,
3188 since mem-to-mem copies above may do function calls. */
3189 if (partial > 0 && reg != 0)
3191 /* Handle calls that pass values in multiple non-contiguous locations.
3192 The Irix 6 ABI has examples of this. */
3193 if (GET_CODE (reg) == PARALLEL)
3194 emit_group_load (reg, x, -1, align); /* ??? size? */
3195 else
3196 move_block_to_reg (REGNO (reg), x, partial, mode);
3199 if (extra && args_addr == 0 && where_pad == stack_direction)
3200 anti_adjust_stack (GEN_INT (extra));
3203 /* Expand an assignment that stores the value of FROM into TO.
3204 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3205 (This may contain a QUEUED rtx;
3206 if the value is constant, this rtx is a constant.)
3207 Otherwise, the returned value is NULL_RTX.
3209 SUGGEST_REG is no longer actually used.
3210 It used to mean, copy the value through a register
3211 and return that register, if that is possible.
3212 We now use WANT_VALUE to decide whether to do this. */
3215 expand_assignment (to, from, want_value, suggest_reg)
3216 tree to, from;
3217 int want_value;
3218 int suggest_reg ATTRIBUTE_UNUSED;
3220 register rtx to_rtx = 0;
3221 rtx result;
3223 /* Don't crash if the lhs of the assignment was erroneous. */
3225 if (TREE_CODE (to) == ERROR_MARK)
3227 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3228 return want_value ? result : NULL_RTX;
3231 /* Assignment of a structure component needs special treatment
3232 if the structure component's rtx is not simply a MEM.
3233 Assignment of an array element at a constant index, and assignment of
3234 an array element in an unaligned packed structure field, has the same
3235 problem. */
3237 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3238 || TREE_CODE (to) == ARRAY_REF)
3240 enum machine_mode mode1;
3241 int bitsize;
3242 int bitpos;
3243 tree offset;
3244 int unsignedp;
3245 int volatilep = 0;
3246 tree tem;
3247 int alignment;
3249 push_temp_slots ();
3250 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3251 &unsignedp, &volatilep, &alignment);
3253 /* If we are going to use store_bit_field and extract_bit_field,
3254 make sure to_rtx will be safe for multiple use. */
3256 if (mode1 == VOIDmode && want_value)
3257 tem = stabilize_reference (tem);
3259 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3260 if (offset != 0)
3262 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3264 if (GET_CODE (to_rtx) != MEM)
3265 abort ();
3267 if (GET_MODE (offset_rtx) != ptr_mode)
3269 #ifdef POINTERS_EXTEND_UNSIGNED
3270 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3271 #else
3272 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3273 #endif
3276 /* A constant address in TO_RTX can have VOIDmode, we must not try
3277 to call force_reg for that case. Avoid that case. */
3278 if (GET_CODE (to_rtx) == MEM
3279 && GET_MODE (to_rtx) == BLKmode
3280 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3281 && bitsize
3282 && (bitpos % bitsize) == 0
3283 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3284 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3286 rtx temp = change_address (to_rtx, mode1,
3287 plus_constant (XEXP (to_rtx, 0),
3288 (bitpos /
3289 BITS_PER_UNIT)));
3290 if (GET_CODE (XEXP (temp, 0)) == REG)
3291 to_rtx = temp;
3292 else
3293 to_rtx = change_address (to_rtx, mode1,
3294 force_reg (GET_MODE (XEXP (temp, 0)),
3295 XEXP (temp, 0)));
3296 bitpos = 0;
3299 to_rtx = change_address (to_rtx, VOIDmode,
3300 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3301 force_reg (ptr_mode,
3302 offset_rtx)));
3305 if (volatilep)
3307 if (GET_CODE (to_rtx) == MEM)
3309 /* When the offset is zero, to_rtx is the address of the
3310 structure we are storing into, and hence may be shared.
3311 We must make a new MEM before setting the volatile bit. */
3312 if (offset == 0)
3313 to_rtx = copy_rtx (to_rtx);
3315 MEM_VOLATILE_P (to_rtx) = 1;
3317 #if 0 /* This was turned off because, when a field is volatile
3318 in an object which is not volatile, the object may be in a register,
3319 and then we would abort over here. */
3320 else
3321 abort ();
3322 #endif
3325 if (TREE_CODE (to) == COMPONENT_REF
3326 && TREE_READONLY (TREE_OPERAND (to, 1)))
3328 if (offset == 0)
3329 to_rtx = copy_rtx (to_rtx);
3331 RTX_UNCHANGING_P (to_rtx) = 1;
3334 /* Check the access. */
3335 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3337 rtx to_addr;
3338 int size;
3339 int best_mode_size;
3340 enum machine_mode best_mode;
3342 best_mode = get_best_mode (bitsize, bitpos,
3343 TYPE_ALIGN (TREE_TYPE (tem)),
3344 mode1, volatilep);
3345 if (best_mode == VOIDmode)
3346 best_mode = QImode;
3348 best_mode_size = GET_MODE_BITSIZE (best_mode);
3349 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3350 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3351 size *= GET_MODE_SIZE (best_mode);
3353 /* Check the access right of the pointer. */
3354 if (size)
3355 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3356 to_addr, Pmode,
3357 GEN_INT (size), TYPE_MODE (sizetype),
3358 GEN_INT (MEMORY_USE_WO),
3359 TYPE_MODE (integer_type_node));
3362 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3363 (want_value
3364 /* Spurious cast makes HPUX compiler happy. */
3365 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3366 : VOIDmode),
3367 unsignedp,
3368 /* Required alignment of containing datum. */
3369 alignment,
3370 int_size_in_bytes (TREE_TYPE (tem)),
3371 get_alias_set (to));
3372 preserve_temp_slots (result);
3373 free_temp_slots ();
3374 pop_temp_slots ();
3376 /* If the value is meaningful, convert RESULT to the proper mode.
3377 Otherwise, return nothing. */
3378 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3379 TYPE_MODE (TREE_TYPE (from)),
3380 result,
3381 TREE_UNSIGNED (TREE_TYPE (to)))
3382 : NULL_RTX);
3385 /* If the rhs is a function call and its value is not an aggregate,
3386 call the function before we start to compute the lhs.
3387 This is needed for correct code for cases such as
3388 val = setjmp (buf) on machines where reference to val
3389 requires loading up part of an address in a separate insn.
3391 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3392 a promoted variable where the zero- or sign- extension needs to be done.
3393 Handling this in the normal way is safe because no computation is done
3394 before the call. */
3395 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3396 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3397 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3399 rtx value;
3401 push_temp_slots ();
3402 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3403 if (to_rtx == 0)
3404 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3406 /* Handle calls that return values in multiple non-contiguous locations.
3407 The Irix 6 ABI has examples of this. */
3408 if (GET_CODE (to_rtx) == PARALLEL)
3409 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3410 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3411 else if (GET_MODE (to_rtx) == BLKmode)
3412 emit_block_move (to_rtx, value, expr_size (from),
3413 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3414 else
3416 #ifdef POINTERS_EXTEND_UNSIGNED
3417 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3418 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3419 value = convert_memory_address (GET_MODE (to_rtx), value);
3420 #endif
3421 emit_move_insn (to_rtx, value);
3423 preserve_temp_slots (to_rtx);
3424 free_temp_slots ();
3425 pop_temp_slots ();
3426 return want_value ? to_rtx : NULL_RTX;
3429 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3430 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3432 if (to_rtx == 0)
3434 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3435 if (GET_CODE (to_rtx) == MEM)
3436 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3439 /* Don't move directly into a return register. */
3440 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3442 rtx temp;
3444 push_temp_slots ();
3445 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3446 emit_move_insn (to_rtx, temp);
3447 preserve_temp_slots (to_rtx);
3448 free_temp_slots ();
3449 pop_temp_slots ();
3450 return want_value ? to_rtx : NULL_RTX;
3453 /* In case we are returning the contents of an object which overlaps
3454 the place the value is being stored, use a safe function when copying
3455 a value through a pointer into a structure value return block. */
3456 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3457 && current_function_returns_struct
3458 && !current_function_returns_pcc_struct)
3460 rtx from_rtx, size;
3462 push_temp_slots ();
3463 size = expr_size (from);
3464 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3465 EXPAND_MEMORY_USE_DONT);
3467 /* Copy the rights of the bitmap. */
3468 if (current_function_check_memory_usage)
3469 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3470 XEXP (to_rtx, 0), Pmode,
3471 XEXP (from_rtx, 0), Pmode,
3472 convert_to_mode (TYPE_MODE (sizetype),
3473 size, TREE_UNSIGNED (sizetype)),
3474 TYPE_MODE (sizetype));
3476 #ifdef TARGET_MEM_FUNCTIONS
3477 emit_library_call (memcpy_libfunc, 0,
3478 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3479 XEXP (from_rtx, 0), Pmode,
3480 convert_to_mode (TYPE_MODE (sizetype),
3481 size, TREE_UNSIGNED (sizetype)),
3482 TYPE_MODE (sizetype));
3483 #else
3484 emit_library_call (bcopy_libfunc, 0,
3485 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3486 XEXP (to_rtx, 0), Pmode,
3487 convert_to_mode (TYPE_MODE (integer_type_node),
3488 size, TREE_UNSIGNED (integer_type_node)),
3489 TYPE_MODE (integer_type_node));
3490 #endif
3492 preserve_temp_slots (to_rtx);
3493 free_temp_slots ();
3494 pop_temp_slots ();
3495 return want_value ? to_rtx : NULL_RTX;
3498 /* Compute FROM and store the value in the rtx we got. */
3500 push_temp_slots ();
3501 result = store_expr (from, to_rtx, want_value);
3502 preserve_temp_slots (result);
3503 free_temp_slots ();
3504 pop_temp_slots ();
3505 return want_value ? result : NULL_RTX;
3508 /* Generate code for computing expression EXP,
3509 and storing the value into TARGET.
3510 TARGET may contain a QUEUED rtx.
3512 If WANT_VALUE is nonzero, return a copy of the value
3513 not in TARGET, so that we can be sure to use the proper
3514 value in a containing expression even if TARGET has something
3515 else stored in it. If possible, we copy the value through a pseudo
3516 and return that pseudo. Or, if the value is constant, we try to
3517 return the constant. In some cases, we return a pseudo
3518 copied *from* TARGET.
3520 If the mode is BLKmode then we may return TARGET itself.
3521 It turns out that in BLKmode it doesn't cause a problem.
3522 because C has no operators that could combine two different
3523 assignments into the same BLKmode object with different values
3524 with no sequence point. Will other languages need this to
3525 be more thorough?
3527 If WANT_VALUE is 0, we return NULL, to make sure
3528 to catch quickly any cases where the caller uses the value
3529 and fails to set WANT_VALUE. */
3532 store_expr (exp, target, want_value)
3533 register tree exp;
3534 register rtx target;
3535 int want_value;
3537 register rtx temp;
3538 int dont_return_target = 0;
3540 if (TREE_CODE (exp) == COMPOUND_EXPR)
3542 /* Perform first part of compound expression, then assign from second
3543 part. */
3544 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3545 emit_queue ();
3546 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3548 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3550 /* For conditional expression, get safe form of the target. Then
3551 test the condition, doing the appropriate assignment on either
3552 side. This avoids the creation of unnecessary temporaries.
3553 For non-BLKmode, it is more efficient not to do this. */
3555 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3557 emit_queue ();
3558 target = protect_from_queue (target, 1);
3560 do_pending_stack_adjust ();
3561 NO_DEFER_POP;
3562 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3563 start_cleanup_deferral ();
3564 store_expr (TREE_OPERAND (exp, 1), target, 0);
3565 end_cleanup_deferral ();
3566 emit_queue ();
3567 emit_jump_insn (gen_jump (lab2));
3568 emit_barrier ();
3569 emit_label (lab1);
3570 start_cleanup_deferral ();
3571 store_expr (TREE_OPERAND (exp, 2), target, 0);
3572 end_cleanup_deferral ();
3573 emit_queue ();
3574 emit_label (lab2);
3575 OK_DEFER_POP;
3577 return want_value ? target : NULL_RTX;
3579 else if (queued_subexp_p (target))
3580 /* If target contains a postincrement, let's not risk
3581 using it as the place to generate the rhs. */
3583 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3585 /* Expand EXP into a new pseudo. */
3586 temp = gen_reg_rtx (GET_MODE (target));
3587 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3589 else
3590 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3592 /* If target is volatile, ANSI requires accessing the value
3593 *from* the target, if it is accessed. So make that happen.
3594 In no case return the target itself. */
3595 if (! MEM_VOLATILE_P (target) && want_value)
3596 dont_return_target = 1;
3598 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3599 && GET_MODE (target) != BLKmode)
3600 /* If target is in memory and caller wants value in a register instead,
3601 arrange that. Pass TARGET as target for expand_expr so that,
3602 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3603 We know expand_expr will not use the target in that case.
3604 Don't do this if TARGET is volatile because we are supposed
3605 to write it and then read it. */
3607 temp = expand_expr (exp, target, GET_MODE (target), 0);
3608 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3609 temp = copy_to_reg (temp);
3610 dont_return_target = 1;
3612 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3613 /* If this is an scalar in a register that is stored in a wider mode
3614 than the declared mode, compute the result into its declared mode
3615 and then convert to the wider mode. Our value is the computed
3616 expression. */
3618 /* If we don't want a value, we can do the conversion inside EXP,
3619 which will often result in some optimizations. Do the conversion
3620 in two steps: first change the signedness, if needed, then
3621 the extend. But don't do this if the type of EXP is a subtype
3622 of something else since then the conversion might involve
3623 more than just converting modes. */
3624 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3625 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3627 if (TREE_UNSIGNED (TREE_TYPE (exp))
3628 != SUBREG_PROMOTED_UNSIGNED_P (target))
3630 = convert
3631 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3632 TREE_TYPE (exp)),
3633 exp);
3635 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3636 SUBREG_PROMOTED_UNSIGNED_P (target)),
3637 exp);
3640 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3642 /* If TEMP is a volatile MEM and we want a result value, make
3643 the access now so it gets done only once. Likewise if
3644 it contains TARGET. */
3645 if (GET_CODE (temp) == MEM && want_value
3646 && (MEM_VOLATILE_P (temp)
3647 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3648 temp = copy_to_reg (temp);
3650 /* If TEMP is a VOIDmode constant, use convert_modes to make
3651 sure that we properly convert it. */
3652 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3653 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3654 TYPE_MODE (TREE_TYPE (exp)), temp,
3655 SUBREG_PROMOTED_UNSIGNED_P (target));
3657 convert_move (SUBREG_REG (target), temp,
3658 SUBREG_PROMOTED_UNSIGNED_P (target));
3660 /* If we promoted a constant, change the mode back down to match
3661 target. Otherwise, the caller might get confused by a result whose
3662 mode is larger than expected. */
3664 if (want_value && GET_MODE (temp) != GET_MODE (target)
3665 && GET_MODE (temp) != VOIDmode)
3667 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3668 SUBREG_PROMOTED_VAR_P (temp) = 1;
3669 SUBREG_PROMOTED_UNSIGNED_P (temp)
3670 = SUBREG_PROMOTED_UNSIGNED_P (target);
3673 return want_value ? temp : NULL_RTX;
3675 else
3677 temp = expand_expr (exp, target, GET_MODE (target), 0);
3678 /* Return TARGET if it's a specified hardware register.
3679 If TARGET is a volatile mem ref, either return TARGET
3680 or return a reg copied *from* TARGET; ANSI requires this.
3682 Otherwise, if TEMP is not TARGET, return TEMP
3683 if it is constant (for efficiency),
3684 or if we really want the correct value. */
3685 if (!(target && GET_CODE (target) == REG
3686 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3687 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3688 && ! rtx_equal_p (temp, target)
3689 && (CONSTANT_P (temp) || want_value))
3690 dont_return_target = 1;
3693 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3694 the same as that of TARGET, adjust the constant. This is needed, for
3695 example, in case it is a CONST_DOUBLE and we want only a word-sized
3696 value. */
3697 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3698 && TREE_CODE (exp) != ERROR_MARK
3699 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3700 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3701 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3703 if (current_function_check_memory_usage
3704 && GET_CODE (target) == MEM
3705 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3707 if (GET_CODE (temp) == MEM)
3708 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3709 XEXP (target, 0), Pmode,
3710 XEXP (temp, 0), Pmode,
3711 expr_size (exp), TYPE_MODE (sizetype));
3712 else
3713 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3714 XEXP (target, 0), Pmode,
3715 expr_size (exp), TYPE_MODE (sizetype),
3716 GEN_INT (MEMORY_USE_WO),
3717 TYPE_MODE (integer_type_node));
3720 /* If value was not generated in the target, store it there.
3721 Convert the value to TARGET's type first if nec. */
3722 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3723 one or both of them are volatile memory refs, we have to distinguish
3724 two cases:
3725 - expand_expr has used TARGET. In this case, we must not generate
3726 another copy. This can be detected by TARGET being equal according
3727 to == .
3728 - expand_expr has not used TARGET - that means that the source just
3729 happens to have the same RTX form. Since temp will have been created
3730 by expand_expr, it will compare unequal according to == .
3731 We must generate a copy in this case, to reach the correct number
3732 of volatile memory references. */
3734 if ((! rtx_equal_p (temp, target)
3735 || (temp != target && (side_effects_p (temp)
3736 || side_effects_p (target))))
3737 && TREE_CODE (exp) != ERROR_MARK)
3739 target = protect_from_queue (target, 1);
3740 if (GET_MODE (temp) != GET_MODE (target)
3741 && GET_MODE (temp) != VOIDmode)
3743 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3744 if (dont_return_target)
3746 /* In this case, we will return TEMP,
3747 so make sure it has the proper mode.
3748 But don't forget to store the value into TARGET. */
3749 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3750 emit_move_insn (target, temp);
3752 else
3753 convert_move (target, temp, unsignedp);
3756 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3758 /* Handle copying a string constant into an array.
3759 The string constant may be shorter than the array.
3760 So copy just the string's actual length, and clear the rest. */
3761 rtx size;
3762 rtx addr;
3764 /* Get the size of the data type of the string,
3765 which is actually the size of the target. */
3766 size = expr_size (exp);
3767 if (GET_CODE (size) == CONST_INT
3768 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3769 emit_block_move (target, temp, size,
3770 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3771 else
3773 /* Compute the size of the data to copy from the string. */
3774 tree copy_size
3775 = size_binop (MIN_EXPR,
3776 make_tree (sizetype, size),
3777 convert (sizetype,
3778 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3779 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3780 VOIDmode, 0);
3781 rtx label = 0;
3783 /* Copy that much. */
3784 emit_block_move (target, temp, copy_size_rtx,
3785 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3787 /* Figure out how much is left in TARGET that we have to clear.
3788 Do all calculations in ptr_mode. */
3790 addr = XEXP (target, 0);
3791 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3793 if (GET_CODE (copy_size_rtx) == CONST_INT)
3795 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3796 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3798 else
3800 addr = force_reg (ptr_mode, addr);
3801 addr = expand_binop (ptr_mode, add_optab, addr,
3802 copy_size_rtx, NULL_RTX, 0,
3803 OPTAB_LIB_WIDEN);
3805 size = expand_binop (ptr_mode, sub_optab, size,
3806 copy_size_rtx, NULL_RTX, 0,
3807 OPTAB_LIB_WIDEN);
3809 label = gen_label_rtx ();
3810 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3811 GET_MODE (size), 0, 0, label);
3814 if (size != const0_rtx)
3816 /* Be sure we can write on ADDR. */
3817 if (current_function_check_memory_usage)
3818 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3819 addr, Pmode,
3820 size, TYPE_MODE (sizetype),
3821 GEN_INT (MEMORY_USE_WO),
3822 TYPE_MODE (integer_type_node));
3823 #ifdef TARGET_MEM_FUNCTIONS
3824 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3825 addr, ptr_mode,
3826 const0_rtx, TYPE_MODE (integer_type_node),
3827 convert_to_mode (TYPE_MODE (sizetype),
3828 size,
3829 TREE_UNSIGNED (sizetype)),
3830 TYPE_MODE (sizetype));
3831 #else
3832 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3833 addr, ptr_mode,
3834 convert_to_mode (TYPE_MODE (integer_type_node),
3835 size,
3836 TREE_UNSIGNED (integer_type_node)),
3837 TYPE_MODE (integer_type_node));
3838 #endif
3841 if (label)
3842 emit_label (label);
3845 /* Handle calls that return values in multiple non-contiguous locations.
3846 The Irix 6 ABI has examples of this. */
3847 else if (GET_CODE (target) == PARALLEL)
3848 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3849 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3850 else if (GET_MODE (temp) == BLKmode)
3851 emit_block_move (target, temp, expr_size (exp),
3852 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3853 else
3854 emit_move_insn (target, temp);
3857 /* If we don't want a value, return NULL_RTX. */
3858 if (! want_value)
3859 return NULL_RTX;
3861 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3862 ??? The latter test doesn't seem to make sense. */
3863 else if (dont_return_target && GET_CODE (temp) != MEM)
3864 return temp;
3866 /* Return TARGET itself if it is a hard register. */
3867 else if (want_value && GET_MODE (target) != BLKmode
3868 && ! (GET_CODE (target) == REG
3869 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3870 return copy_to_reg (target);
3872 else
3873 return target;
3876 /* Return 1 if EXP just contains zeros. */
3878 static int
3879 is_zeros_p (exp)
3880 tree exp;
3882 tree elt;
3884 switch (TREE_CODE (exp))
3886 case CONVERT_EXPR:
3887 case NOP_EXPR:
3888 case NON_LVALUE_EXPR:
3889 return is_zeros_p (TREE_OPERAND (exp, 0));
3891 case INTEGER_CST:
3892 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3894 case COMPLEX_CST:
3895 return
3896 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3898 case REAL_CST:
3899 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3901 case CONSTRUCTOR:
3902 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3903 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3904 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3905 if (! is_zeros_p (TREE_VALUE (elt)))
3906 return 0;
3908 return 1;
3910 default:
3911 return 0;
3915 /* Return 1 if EXP contains mostly (3/4) zeros. */
3917 static int
3918 mostly_zeros_p (exp)
3919 tree exp;
3921 if (TREE_CODE (exp) == CONSTRUCTOR)
3923 int elts = 0, zeros = 0;
3924 tree elt = CONSTRUCTOR_ELTS (exp);
3925 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3927 /* If there are no ranges of true bits, it is all zero. */
3928 return elt == NULL_TREE;
3930 for (; elt; elt = TREE_CHAIN (elt))
3932 /* We do not handle the case where the index is a RANGE_EXPR,
3933 so the statistic will be somewhat inaccurate.
3934 We do make a more accurate count in store_constructor itself,
3935 so since this function is only used for nested array elements,
3936 this should be close enough. */
3937 if (mostly_zeros_p (TREE_VALUE (elt)))
3938 zeros++;
3939 elts++;
3942 return 4 * zeros >= 3 * elts;
3945 return is_zeros_p (exp);
3948 /* Helper function for store_constructor.
3949 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3950 TYPE is the type of the CONSTRUCTOR, not the element type.
3951 ALIGN and CLEARED are as for store_constructor.
3953 This provides a recursive shortcut back to store_constructor when it isn't
3954 necessary to go through store_field. This is so that we can pass through
3955 the cleared field to let store_constructor know that we may not have to
3956 clear a substructure if the outer structure has already been cleared. */
3958 static void
3959 store_constructor_field (target, bitsize, bitpos,
3960 mode, exp, type, align, cleared)
3961 rtx target;
3962 int bitsize, bitpos;
3963 enum machine_mode mode;
3964 tree exp, type;
3965 int align;
3966 int cleared;
3968 if (TREE_CODE (exp) == CONSTRUCTOR
3969 && bitpos % BITS_PER_UNIT == 0
3970 /* If we have a non-zero bitpos for a register target, then we just
3971 let store_field do the bitfield handling. This is unlikely to
3972 generate unnecessary clear instructions anyways. */
3973 && (bitpos == 0 || GET_CODE (target) == MEM))
3975 if (bitpos != 0)
3976 target = change_address (target, VOIDmode,
3977 plus_constant (XEXP (target, 0),
3978 bitpos / BITS_PER_UNIT));
3979 store_constructor (exp, target, align, cleared);
3981 else
3982 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
3983 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
3984 int_size_in_bytes (type), cleared);
3987 /* Store the value of constructor EXP into the rtx TARGET.
3988 TARGET is either a REG or a MEM.
3989 ALIGN is the maximum known alignment for TARGET, in bits.
3990 CLEARED is true if TARGET is known to have been zero'd. */
3992 static void
3993 store_constructor (exp, target, align, cleared)
3994 tree exp;
3995 rtx target;
3996 int align;
3997 int cleared;
3999 tree type = TREE_TYPE (exp);
4000 #ifdef WORD_REGISTER_OPERATIONS
4001 rtx exp_size = expr_size (exp);
4002 #endif
4004 /* We know our target cannot conflict, since safe_from_p has been called. */
4005 #if 0
4006 /* Don't try copying piece by piece into a hard register
4007 since that is vulnerable to being clobbered by EXP.
4008 Instead, construct in a pseudo register and then copy it all. */
4009 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4011 rtx temp = gen_reg_rtx (GET_MODE (target));
4012 store_constructor (exp, temp, 0);
4013 emit_move_insn (target, temp);
4014 return;
4016 #endif
4018 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4019 || TREE_CODE (type) == QUAL_UNION_TYPE)
4021 register tree elt;
4023 /* Inform later passes that the whole union value is dead. */
4024 if (TREE_CODE (type) == UNION_TYPE
4025 || TREE_CODE (type) == QUAL_UNION_TYPE)
4026 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4028 /* If we are building a static constructor into a register,
4029 set the initial value as zero so we can fold the value into
4030 a constant. But if more than one register is involved,
4031 this probably loses. */
4032 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4033 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4035 if (! cleared)
4036 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4038 cleared = 1;
4041 /* If the constructor has fewer fields than the structure
4042 or if we are initializing the structure to mostly zeros,
4043 clear the whole structure first. */
4044 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4045 != list_length (TYPE_FIELDS (type)))
4046 || mostly_zeros_p (exp))
4048 if (! cleared)
4049 clear_storage (target, expr_size (exp),
4050 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4052 cleared = 1;
4054 else
4055 /* Inform later passes that the old value is dead. */
4056 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4058 /* Store each element of the constructor into
4059 the corresponding field of TARGET. */
4061 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4063 register tree field = TREE_PURPOSE (elt);
4064 #ifdef WORD_REGISTER_OPERATIONS
4065 tree value = TREE_VALUE (elt);
4066 #endif
4067 register enum machine_mode mode;
4068 int bitsize;
4069 int bitpos = 0;
4070 int unsignedp;
4071 tree pos, constant = 0, offset = 0;
4072 rtx to_rtx = target;
4074 /* Just ignore missing fields.
4075 We cleared the whole structure, above,
4076 if any fields are missing. */
4077 if (field == 0)
4078 continue;
4080 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4081 continue;
4083 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4084 unsignedp = TREE_UNSIGNED (field);
4085 mode = DECL_MODE (field);
4086 if (DECL_BIT_FIELD (field))
4087 mode = VOIDmode;
4089 pos = DECL_FIELD_BITPOS (field);
4090 if (TREE_CODE (pos) == INTEGER_CST)
4091 constant = pos;
4092 else if (TREE_CODE (pos) == PLUS_EXPR
4093 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4094 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4095 else
4096 offset = pos;
4098 if (constant)
4099 bitpos = TREE_INT_CST_LOW (constant);
4101 if (offset)
4103 rtx offset_rtx;
4105 if (contains_placeholder_p (offset))
4106 offset = build (WITH_RECORD_EXPR, sizetype,
4107 offset, make_tree (TREE_TYPE (exp), target));
4109 offset = size_binop (FLOOR_DIV_EXPR, offset,
4110 size_int (BITS_PER_UNIT));
4112 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4113 if (GET_CODE (to_rtx) != MEM)
4114 abort ();
4116 if (GET_MODE (offset_rtx) != ptr_mode)
4118 #ifdef POINTERS_EXTEND_UNSIGNED
4119 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4120 #else
4121 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4122 #endif
4125 to_rtx
4126 = change_address (to_rtx, VOIDmode,
4127 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4128 force_reg (ptr_mode,
4129 offset_rtx)));
4132 if (TREE_READONLY (field))
4134 if (GET_CODE (to_rtx) == MEM)
4135 to_rtx = copy_rtx (to_rtx);
4137 RTX_UNCHANGING_P (to_rtx) = 1;
4140 #ifdef WORD_REGISTER_OPERATIONS
4141 /* If this initializes a field that is smaller than a word, at the
4142 start of a word, try to widen it to a full word.
4143 This special case allows us to output C++ member function
4144 initializations in a form that the optimizers can understand. */
4145 if (constant
4146 && GET_CODE (target) == REG
4147 && bitsize < BITS_PER_WORD
4148 && bitpos % BITS_PER_WORD == 0
4149 && GET_MODE_CLASS (mode) == MODE_INT
4150 && TREE_CODE (value) == INTEGER_CST
4151 && GET_CODE (exp_size) == CONST_INT
4152 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4154 tree type = TREE_TYPE (value);
4155 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4157 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4158 value = convert (type, value);
4160 if (BYTES_BIG_ENDIAN)
4161 value
4162 = fold (build (LSHIFT_EXPR, type, value,
4163 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4164 bitsize = BITS_PER_WORD;
4165 mode = word_mode;
4167 #endif
4168 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4169 TREE_VALUE (elt), type,
4170 MIN (align,
4171 DECL_ALIGN (TREE_PURPOSE (elt))),
4172 cleared);
4175 else if (TREE_CODE (type) == ARRAY_TYPE)
4177 register tree elt;
4178 register int i;
4179 int need_to_clear;
4180 tree domain = TYPE_DOMAIN (type);
4181 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4182 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4183 tree elttype = TREE_TYPE (type);
4185 /* If the constructor has fewer elements than the array,
4186 clear the whole array first. Similarly if this is
4187 static constructor of a non-BLKmode object. */
4188 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4189 need_to_clear = 1;
4190 else
4192 HOST_WIDE_INT count = 0, zero_count = 0;
4193 need_to_clear = 0;
4194 /* This loop is a more accurate version of the loop in
4195 mostly_zeros_p (it handles RANGE_EXPR in an index).
4196 It is also needed to check for missing elements. */
4197 for (elt = CONSTRUCTOR_ELTS (exp);
4198 elt != NULL_TREE;
4199 elt = TREE_CHAIN (elt))
4201 tree index = TREE_PURPOSE (elt);
4202 HOST_WIDE_INT this_node_count;
4203 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4205 tree lo_index = TREE_OPERAND (index, 0);
4206 tree hi_index = TREE_OPERAND (index, 1);
4207 if (TREE_CODE (lo_index) != INTEGER_CST
4208 || TREE_CODE (hi_index) != INTEGER_CST)
4210 need_to_clear = 1;
4211 break;
4213 this_node_count = TREE_INT_CST_LOW (hi_index)
4214 - TREE_INT_CST_LOW (lo_index) + 1;
4216 else
4217 this_node_count = 1;
4218 count += this_node_count;
4219 if (mostly_zeros_p (TREE_VALUE (elt)))
4220 zero_count += this_node_count;
4222 /* Clear the entire array first if there are any missing elements,
4223 or if the incidence of zero elements is >= 75%. */
4224 if (count < maxelt - minelt + 1
4225 || 4 * zero_count >= 3 * count)
4226 need_to_clear = 1;
4228 if (need_to_clear)
4230 if (! cleared)
4231 clear_storage (target, expr_size (exp),
4232 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4233 cleared = 1;
4235 else
4236 /* Inform later passes that the old value is dead. */
4237 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4239 /* Store each element of the constructor into
4240 the corresponding element of TARGET, determined
4241 by counting the elements. */
4242 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4243 elt;
4244 elt = TREE_CHAIN (elt), i++)
4246 register enum machine_mode mode;
4247 int bitsize;
4248 int bitpos;
4249 int unsignedp;
4250 tree value = TREE_VALUE (elt);
4251 int align = TYPE_ALIGN (TREE_TYPE (value));
4252 tree index = TREE_PURPOSE (elt);
4253 rtx xtarget = target;
4255 if (cleared && is_zeros_p (value))
4256 continue;
4258 mode = TYPE_MODE (elttype);
4259 bitsize = GET_MODE_BITSIZE (mode);
4260 unsignedp = TREE_UNSIGNED (elttype);
4262 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4264 tree lo_index = TREE_OPERAND (index, 0);
4265 tree hi_index = TREE_OPERAND (index, 1);
4266 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4267 struct nesting *loop;
4268 HOST_WIDE_INT lo, hi, count;
4269 tree position;
4271 /* If the range is constant and "small", unroll the loop. */
4272 if (TREE_CODE (lo_index) == INTEGER_CST
4273 && TREE_CODE (hi_index) == INTEGER_CST
4274 && (lo = TREE_INT_CST_LOW (lo_index),
4275 hi = TREE_INT_CST_LOW (hi_index),
4276 count = hi - lo + 1,
4277 (GET_CODE (target) != MEM
4278 || count <= 2
4279 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4280 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4281 <= 40 * 8))))
4283 lo -= minelt; hi -= minelt;
4284 for (; lo <= hi; lo++)
4286 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4287 store_constructor_field (target, bitsize, bitpos, mode,
4288 value, type, align, cleared);
4291 else
4293 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4294 loop_top = gen_label_rtx ();
4295 loop_end = gen_label_rtx ();
4297 unsignedp = TREE_UNSIGNED (domain);
4299 index = build_decl (VAR_DECL, NULL_TREE, domain);
4301 DECL_RTL (index) = index_r
4302 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4303 &unsignedp, 0));
4305 if (TREE_CODE (value) == SAVE_EXPR
4306 && SAVE_EXPR_RTL (value) == 0)
4308 /* Make sure value gets expanded once before the
4309 loop. */
4310 expand_expr (value, const0_rtx, VOIDmode, 0);
4311 emit_queue ();
4313 store_expr (lo_index, index_r, 0);
4314 loop = expand_start_loop (0);
4316 /* Assign value to element index. */
4317 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4318 size_int (BITS_PER_UNIT));
4319 position = size_binop (MULT_EXPR,
4320 size_binop (MINUS_EXPR, index,
4321 TYPE_MIN_VALUE (domain)),
4322 position);
4323 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4324 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4325 xtarget = change_address (target, mode, addr);
4326 if (TREE_CODE (value) == CONSTRUCTOR)
4327 store_constructor (value, xtarget, align, cleared);
4328 else
4329 store_expr (value, xtarget, 0);
4331 expand_exit_loop_if_false (loop,
4332 build (LT_EXPR, integer_type_node,
4333 index, hi_index));
4335 expand_increment (build (PREINCREMENT_EXPR,
4336 TREE_TYPE (index),
4337 index, integer_one_node), 0, 0);
4338 expand_end_loop ();
4339 emit_label (loop_end);
4341 /* Needed by stupid register allocation. to extend the
4342 lifetime of pseudo-regs used by target past the end
4343 of the loop. */
4344 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4347 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4348 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4350 rtx pos_rtx, addr;
4351 tree position;
4353 if (index == 0)
4354 index = size_int (i);
4356 if (minelt)
4357 index = size_binop (MINUS_EXPR, index,
4358 TYPE_MIN_VALUE (domain));
4359 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4360 size_int (BITS_PER_UNIT));
4361 position = size_binop (MULT_EXPR, index, position);
4362 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4363 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4364 xtarget = change_address (target, mode, addr);
4365 store_expr (value, xtarget, 0);
4367 else
4369 if (index != 0)
4370 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4371 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4372 else
4373 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4374 store_constructor_field (target, bitsize, bitpos, mode, value,
4375 type, align, cleared);
4379 /* set constructor assignments */
4380 else if (TREE_CODE (type) == SET_TYPE)
4382 tree elt = CONSTRUCTOR_ELTS (exp);
4383 int nbytes = int_size_in_bytes (type), nbits;
4384 tree domain = TYPE_DOMAIN (type);
4385 tree domain_min, domain_max, bitlength;
4387 /* The default implementation strategy is to extract the constant
4388 parts of the constructor, use that to initialize the target,
4389 and then "or" in whatever non-constant ranges we need in addition.
4391 If a large set is all zero or all ones, it is
4392 probably better to set it using memset (if available) or bzero.
4393 Also, if a large set has just a single range, it may also be
4394 better to first clear all the first clear the set (using
4395 bzero/memset), and set the bits we want. */
4397 /* Check for all zeros. */
4398 if (elt == NULL_TREE)
4400 if (!cleared)
4401 clear_storage (target, expr_size (exp),
4402 TYPE_ALIGN (type) / BITS_PER_UNIT);
4403 return;
4406 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4407 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4408 bitlength = size_binop (PLUS_EXPR,
4409 size_binop (MINUS_EXPR, domain_max, domain_min),
4410 size_one_node);
4412 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4413 abort ();
4414 nbits = TREE_INT_CST_LOW (bitlength);
4416 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4417 are "complicated" (more than one range), initialize (the
4418 constant parts) by copying from a constant. */
4419 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4420 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4422 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4423 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4424 char *bit_buffer = (char *) alloca (nbits);
4425 HOST_WIDE_INT word = 0;
4426 int bit_pos = 0;
4427 int ibit = 0;
4428 int offset = 0; /* In bytes from beginning of set. */
4429 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4430 for (;;)
4432 if (bit_buffer[ibit])
4434 if (BYTES_BIG_ENDIAN)
4435 word |= (1 << (set_word_size - 1 - bit_pos));
4436 else
4437 word |= 1 << bit_pos;
4439 bit_pos++; ibit++;
4440 if (bit_pos >= set_word_size || ibit == nbits)
4442 if (word != 0 || ! cleared)
4444 rtx datum = GEN_INT (word);
4445 rtx to_rtx;
4446 /* The assumption here is that it is safe to use
4447 XEXP if the set is multi-word, but not if
4448 it's single-word. */
4449 if (GET_CODE (target) == MEM)
4451 to_rtx = plus_constant (XEXP (target, 0), offset);
4452 to_rtx = change_address (target, mode, to_rtx);
4454 else if (offset == 0)
4455 to_rtx = target;
4456 else
4457 abort ();
4458 emit_move_insn (to_rtx, datum);
4460 if (ibit == nbits)
4461 break;
4462 word = 0;
4463 bit_pos = 0;
4464 offset += set_word_size / BITS_PER_UNIT;
4468 else if (!cleared)
4470 /* Don't bother clearing storage if the set is all ones. */
4471 if (TREE_CHAIN (elt) != NULL_TREE
4472 || (TREE_PURPOSE (elt) == NULL_TREE
4473 ? nbits != 1
4474 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4475 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4476 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4477 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4478 != nbits))))
4479 clear_storage (target, expr_size (exp),
4480 TYPE_ALIGN (type) / BITS_PER_UNIT);
4483 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4485 /* start of range of element or NULL */
4486 tree startbit = TREE_PURPOSE (elt);
4487 /* end of range of element, or element value */
4488 tree endbit = TREE_VALUE (elt);
4489 #ifdef TARGET_MEM_FUNCTIONS
4490 HOST_WIDE_INT startb, endb;
4491 #endif
4492 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4494 bitlength_rtx = expand_expr (bitlength,
4495 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4497 /* handle non-range tuple element like [ expr ] */
4498 if (startbit == NULL_TREE)
4500 startbit = save_expr (endbit);
4501 endbit = startbit;
4503 startbit = convert (sizetype, startbit);
4504 endbit = convert (sizetype, endbit);
4505 if (! integer_zerop (domain_min))
4507 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4508 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4510 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4511 EXPAND_CONST_ADDRESS);
4512 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4513 EXPAND_CONST_ADDRESS);
4515 if (REG_P (target))
4517 targetx = assign_stack_temp (GET_MODE (target),
4518 GET_MODE_SIZE (GET_MODE (target)),
4520 emit_move_insn (targetx, target);
4522 else if (GET_CODE (target) == MEM)
4523 targetx = target;
4524 else
4525 abort ();
4527 #ifdef TARGET_MEM_FUNCTIONS
4528 /* Optimization: If startbit and endbit are
4529 constants divisible by BITS_PER_UNIT,
4530 call memset instead. */
4531 if (TREE_CODE (startbit) == INTEGER_CST
4532 && TREE_CODE (endbit) == INTEGER_CST
4533 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4534 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4536 emit_library_call (memset_libfunc, 0,
4537 VOIDmode, 3,
4538 plus_constant (XEXP (targetx, 0),
4539 startb / BITS_PER_UNIT),
4540 Pmode,
4541 constm1_rtx, TYPE_MODE (integer_type_node),
4542 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4543 TYPE_MODE (sizetype));
4545 else
4546 #endif
4548 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4549 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4550 bitlength_rtx, TYPE_MODE (sizetype),
4551 startbit_rtx, TYPE_MODE (sizetype),
4552 endbit_rtx, TYPE_MODE (sizetype));
4554 if (REG_P (target))
4555 emit_move_insn (target, targetx);
4559 else
4560 abort ();
4563 /* Store the value of EXP (an expression tree)
4564 into a subfield of TARGET which has mode MODE and occupies
4565 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4566 If MODE is VOIDmode, it means that we are storing into a bit-field.
4568 If VALUE_MODE is VOIDmode, return nothing in particular.
4569 UNSIGNEDP is not used in this case.
4571 Otherwise, return an rtx for the value stored. This rtx
4572 has mode VALUE_MODE if that is convenient to do.
4573 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4575 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4576 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4578 ALIAS_SET is the alias set for the destination. This value will
4579 (in general) be different from that for TARGET, since TARGET is a
4580 reference to the containing structure. */
4582 static rtx
4583 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4584 unsignedp, align, total_size, alias_set)
4585 rtx target;
4586 int bitsize, bitpos;
4587 enum machine_mode mode;
4588 tree exp;
4589 enum machine_mode value_mode;
4590 int unsignedp;
4591 int align;
4592 int total_size;
4593 int alias_set;
4595 HOST_WIDE_INT width_mask = 0;
4597 if (TREE_CODE (exp) == ERROR_MARK)
4598 return const0_rtx;
4600 if (bitsize < HOST_BITS_PER_WIDE_INT)
4601 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4603 /* If we are storing into an unaligned field of an aligned union that is
4604 in a register, we may have the mode of TARGET being an integer mode but
4605 MODE == BLKmode. In that case, get an aligned object whose size and
4606 alignment are the same as TARGET and store TARGET into it (we can avoid
4607 the store if the field being stored is the entire width of TARGET). Then
4608 call ourselves recursively to store the field into a BLKmode version of
4609 that object. Finally, load from the object into TARGET. This is not
4610 very efficient in general, but should only be slightly more expensive
4611 than the otherwise-required unaligned accesses. Perhaps this can be
4612 cleaned up later. */
4614 if (mode == BLKmode
4615 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4617 rtx object = assign_stack_temp (GET_MODE (target),
4618 GET_MODE_SIZE (GET_MODE (target)), 0);
4619 rtx blk_object = copy_rtx (object);
4621 MEM_SET_IN_STRUCT_P (object, 1);
4622 MEM_SET_IN_STRUCT_P (blk_object, 1);
4623 PUT_MODE (blk_object, BLKmode);
4625 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4626 emit_move_insn (object, target);
4628 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4629 align, total_size, alias_set);
4631 /* Even though we aren't returning target, we need to
4632 give it the updated value. */
4633 emit_move_insn (target, object);
4635 return blk_object;
4638 /* If the structure is in a register or if the component
4639 is a bit field, we cannot use addressing to access it.
4640 Use bit-field techniques or SUBREG to store in it. */
4642 if (mode == VOIDmode
4643 || (mode != BLKmode && ! direct_store[(int) mode]
4644 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4645 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4646 || GET_CODE (target) == REG
4647 || GET_CODE (target) == SUBREG
4648 /* If the field isn't aligned enough to store as an ordinary memref,
4649 store it as a bit field. */
4650 || (SLOW_UNALIGNED_ACCESS
4651 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4652 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4654 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4656 /* If BITSIZE is narrower than the size of the type of EXP
4657 we will be narrowing TEMP. Normally, what's wanted are the
4658 low-order bits. However, if EXP's type is a record and this is
4659 big-endian machine, we want the upper BITSIZE bits. */
4660 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4661 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4662 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4663 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4664 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4665 - bitsize),
4666 temp, 1);
4668 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4669 MODE. */
4670 if (mode != VOIDmode && mode != BLKmode
4671 && mode != TYPE_MODE (TREE_TYPE (exp)))
4672 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4674 /* If the modes of TARGET and TEMP are both BLKmode, both
4675 must be in memory and BITPOS must be aligned on a byte
4676 boundary. If so, we simply do a block copy. */
4677 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4679 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4680 || bitpos % BITS_PER_UNIT != 0)
4681 abort ();
4683 target = change_address (target, VOIDmode,
4684 plus_constant (XEXP (target, 0),
4685 bitpos / BITS_PER_UNIT));
4687 emit_block_move (target, temp,
4688 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4689 / BITS_PER_UNIT),
4692 return value_mode == VOIDmode ? const0_rtx : target;
4695 /* Store the value in the bitfield. */
4696 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4697 if (value_mode != VOIDmode)
4699 /* The caller wants an rtx for the value. */
4700 /* If possible, avoid refetching from the bitfield itself. */
4701 if (width_mask != 0
4702 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4704 tree count;
4705 enum machine_mode tmode;
4707 if (unsignedp)
4708 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4709 tmode = GET_MODE (temp);
4710 if (tmode == VOIDmode)
4711 tmode = value_mode;
4712 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4713 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4714 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4716 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4717 NULL_RTX, value_mode, 0, align,
4718 total_size);
4720 return const0_rtx;
4722 else
4724 rtx addr = XEXP (target, 0);
4725 rtx to_rtx;
4727 /* If a value is wanted, it must be the lhs;
4728 so make the address stable for multiple use. */
4730 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4731 && ! CONSTANT_ADDRESS_P (addr)
4732 /* A frame-pointer reference is already stable. */
4733 && ! (GET_CODE (addr) == PLUS
4734 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4735 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4736 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4737 addr = copy_to_reg (addr);
4739 /* Now build a reference to just the desired component. */
4741 to_rtx = copy_rtx (change_address (target, mode,
4742 plus_constant (addr,
4743 (bitpos
4744 / BITS_PER_UNIT))));
4745 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4746 MEM_ALIAS_SET (to_rtx) = alias_set;
4748 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4752 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4753 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4754 ARRAY_REFs and find the ultimate containing object, which we return.
4756 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4757 bit position, and *PUNSIGNEDP to the signedness of the field.
4758 If the position of the field is variable, we store a tree
4759 giving the variable offset (in units) in *POFFSET.
4760 This offset is in addition to the bit position.
4761 If the position is not variable, we store 0 in *POFFSET.
4762 We set *PALIGNMENT to the alignment in bytes of the address that will be
4763 computed. This is the alignment of the thing we return if *POFFSET
4764 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
4766 If any of the extraction expressions is volatile,
4767 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4769 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4770 is a mode that can be used to access the field. In that case, *PBITSIZE
4771 is redundant.
4773 If the field describes a variable-sized object, *PMODE is set to
4774 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4775 this case, but the address of the object can be found. */
4777 tree
4778 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4779 punsignedp, pvolatilep, palignment)
4780 tree exp;
4781 int *pbitsize;
4782 int *pbitpos;
4783 tree *poffset;
4784 enum machine_mode *pmode;
4785 int *punsignedp;
4786 int *pvolatilep;
4787 int *palignment;
4789 tree orig_exp = exp;
4790 tree size_tree = 0;
4791 enum machine_mode mode = VOIDmode;
4792 tree offset = integer_zero_node;
4793 unsigned int alignment = BIGGEST_ALIGNMENT;
4795 if (TREE_CODE (exp) == COMPONENT_REF)
4797 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4798 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4799 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4800 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4802 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4804 size_tree = TREE_OPERAND (exp, 1);
4805 *punsignedp = TREE_UNSIGNED (exp);
4807 else
4809 mode = TYPE_MODE (TREE_TYPE (exp));
4810 if (mode == BLKmode)
4811 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4813 *pbitsize = GET_MODE_BITSIZE (mode);
4814 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4817 if (size_tree)
4819 if (TREE_CODE (size_tree) != INTEGER_CST)
4820 mode = BLKmode, *pbitsize = -1;
4821 else
4822 *pbitsize = TREE_INT_CST_LOW (size_tree);
4825 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4826 and find the ultimate containing object. */
4828 *pbitpos = 0;
4830 while (1)
4832 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4834 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4835 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4836 : TREE_OPERAND (exp, 2));
4837 tree constant = integer_zero_node, var = pos;
4839 /* If this field hasn't been filled in yet, don't go
4840 past it. This should only happen when folding expressions
4841 made during type construction. */
4842 if (pos == 0)
4843 break;
4845 /* Assume here that the offset is a multiple of a unit.
4846 If not, there should be an explicitly added constant. */
4847 if (TREE_CODE (pos) == PLUS_EXPR
4848 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4849 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4850 else if (TREE_CODE (pos) == INTEGER_CST)
4851 constant = pos, var = integer_zero_node;
4853 *pbitpos += TREE_INT_CST_LOW (constant);
4854 offset = size_binop (PLUS_EXPR, offset,
4855 size_binop (EXACT_DIV_EXPR, var,
4856 size_int (BITS_PER_UNIT)));
4859 else if (TREE_CODE (exp) == ARRAY_REF)
4861 /* This code is based on the code in case ARRAY_REF in expand_expr
4862 below. We assume here that the size of an array element is
4863 always an integral multiple of BITS_PER_UNIT. */
4865 tree index = TREE_OPERAND (exp, 1);
4866 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4867 tree low_bound
4868 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4869 tree index_type = TREE_TYPE (index);
4870 tree xindex;
4872 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4874 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4875 index);
4876 index_type = TREE_TYPE (index);
4879 /* Optimize the special-case of a zero lower bound.
4881 We convert the low_bound to sizetype to avoid some problems
4882 with constant folding. (E.g. suppose the lower bound is 1,
4883 and its mode is QI. Without the conversion, (ARRAY
4884 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4885 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4887 But sizetype isn't quite right either (especially if
4888 the lowbound is negative). FIXME */
4890 if (! integer_zerop (low_bound))
4891 index = fold (build (MINUS_EXPR, index_type, index,
4892 convert (sizetype, low_bound)));
4894 if (TREE_CODE (index) == INTEGER_CST)
4896 index = convert (sbitsizetype, index);
4897 index_type = TREE_TYPE (index);
4900 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4901 convert (sbitsizetype,
4902 TYPE_SIZE (TREE_TYPE (exp)))));
4904 if (TREE_CODE (xindex) == INTEGER_CST
4905 && TREE_INT_CST_HIGH (xindex) == 0)
4906 *pbitpos += TREE_INT_CST_LOW (xindex);
4907 else
4909 /* Either the bit offset calculated above is not constant, or
4910 it overflowed. In either case, redo the multiplication
4911 against the size in units. This is especially important
4912 in the non-constant case to avoid a division at runtime. */
4913 xindex = fold (build (MULT_EXPR, ssizetype, index,
4914 convert (ssizetype,
4915 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4917 if (contains_placeholder_p (xindex))
4918 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4920 offset = size_binop (PLUS_EXPR, offset, xindex);
4923 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4924 && ! ((TREE_CODE (exp) == NOP_EXPR
4925 || TREE_CODE (exp) == CONVERT_EXPR)
4926 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4927 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4928 != UNION_TYPE))
4929 && (TYPE_MODE (TREE_TYPE (exp))
4930 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4931 break;
4933 /* If any reference in the chain is volatile, the effect is volatile. */
4934 if (TREE_THIS_VOLATILE (exp))
4935 *pvolatilep = 1;
4937 /* If the offset is non-constant already, then we can't assume any
4938 alignment more than the alignment here. */
4939 if (! integer_zerop (offset))
4940 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4942 exp = TREE_OPERAND (exp, 0);
4945 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4946 alignment = MIN (alignment, DECL_ALIGN (exp));
4947 else if (TREE_TYPE (exp) != 0)
4948 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4950 if (integer_zerop (offset))
4951 offset = 0;
4953 if (offset != 0 && contains_placeholder_p (offset))
4954 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4956 *pmode = mode;
4957 *poffset = offset;
4958 *palignment = alignment / BITS_PER_UNIT;
4959 return exp;
4962 /* Subroutine of expand_exp: compute memory_usage from modifier. */
4963 static enum memory_use_mode
4964 get_memory_usage_from_modifier (modifier)
4965 enum expand_modifier modifier;
4967 switch (modifier)
4969 case EXPAND_NORMAL:
4970 case EXPAND_SUM:
4971 return MEMORY_USE_RO;
4972 break;
4973 case EXPAND_MEMORY_USE_WO:
4974 return MEMORY_USE_WO;
4975 break;
4976 case EXPAND_MEMORY_USE_RW:
4977 return MEMORY_USE_RW;
4978 break;
4979 case EXPAND_MEMORY_USE_DONT:
4980 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4981 MEMORY_USE_DONT, because they are modifiers to a call of
4982 expand_expr in the ADDR_EXPR case of expand_expr. */
4983 case EXPAND_CONST_ADDRESS:
4984 case EXPAND_INITIALIZER:
4985 return MEMORY_USE_DONT;
4986 case EXPAND_MEMORY_USE_BAD:
4987 default:
4988 abort ();
4992 /* Given an rtx VALUE that may contain additions and multiplications,
4993 return an equivalent value that just refers to a register or memory.
4994 This is done by generating instructions to perform the arithmetic
4995 and returning a pseudo-register containing the value.
4997 The returned value may be a REG, SUBREG, MEM or constant. */
5000 force_operand (value, target)
5001 rtx value, target;
5003 register optab binoptab = 0;
5004 /* Use a temporary to force order of execution of calls to
5005 `force_operand'. */
5006 rtx tmp;
5007 register rtx op2;
5008 /* Use subtarget as the target for operand 0 of a binary operation. */
5009 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5011 /* Check for a PIC address load. */
5012 if (flag_pic
5013 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5014 && XEXP (value, 0) == pic_offset_table_rtx
5015 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5016 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5017 || GET_CODE (XEXP (value, 1)) == CONST))
5019 if (!subtarget)
5020 subtarget = gen_reg_rtx (GET_MODE (value));
5021 emit_move_insn (subtarget, value);
5022 return subtarget;
5025 if (GET_CODE (value) == PLUS)
5026 binoptab = add_optab;
5027 else if (GET_CODE (value) == MINUS)
5028 binoptab = sub_optab;
5029 else if (GET_CODE (value) == MULT)
5031 op2 = XEXP (value, 1);
5032 if (!CONSTANT_P (op2)
5033 && !(GET_CODE (op2) == REG && op2 != subtarget))
5034 subtarget = 0;
5035 tmp = force_operand (XEXP (value, 0), subtarget);
5036 return expand_mult (GET_MODE (value), tmp,
5037 force_operand (op2, NULL_RTX),
5038 target, 0);
5041 if (binoptab)
5043 op2 = XEXP (value, 1);
5044 if (!CONSTANT_P (op2)
5045 && !(GET_CODE (op2) == REG && op2 != subtarget))
5046 subtarget = 0;
5047 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5049 binoptab = add_optab;
5050 op2 = negate_rtx (GET_MODE (value), op2);
5053 /* Check for an addition with OP2 a constant integer and our first
5054 operand a PLUS of a virtual register and something else. In that
5055 case, we want to emit the sum of the virtual register and the
5056 constant first and then add the other value. This allows virtual
5057 register instantiation to simply modify the constant rather than
5058 creating another one around this addition. */
5059 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5060 && GET_CODE (XEXP (value, 0)) == PLUS
5061 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5062 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5063 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5065 rtx temp = expand_binop (GET_MODE (value), binoptab,
5066 XEXP (XEXP (value, 0), 0), op2,
5067 subtarget, 0, OPTAB_LIB_WIDEN);
5068 return expand_binop (GET_MODE (value), binoptab, temp,
5069 force_operand (XEXP (XEXP (value, 0), 1), 0),
5070 target, 0, OPTAB_LIB_WIDEN);
5073 tmp = force_operand (XEXP (value, 0), subtarget);
5074 return expand_binop (GET_MODE (value), binoptab, tmp,
5075 force_operand (op2, NULL_RTX),
5076 target, 0, OPTAB_LIB_WIDEN);
5077 /* We give UNSIGNEDP = 0 to expand_binop
5078 because the only operations we are expanding here are signed ones. */
5080 return value;
5083 /* Subroutine of expand_expr:
5084 save the non-copied parts (LIST) of an expr (LHS), and return a list
5085 which can restore these values to their previous values,
5086 should something modify their storage. */
5088 static tree
5089 save_noncopied_parts (lhs, list)
5090 tree lhs;
5091 tree list;
5093 tree tail;
5094 tree parts = 0;
5096 for (tail = list; tail; tail = TREE_CHAIN (tail))
5097 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5098 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5099 else
5101 tree part = TREE_VALUE (tail);
5102 tree part_type = TREE_TYPE (part);
5103 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5104 rtx target = assign_temp (part_type, 0, 1, 1);
5105 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5106 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5107 parts = tree_cons (to_be_saved,
5108 build (RTL_EXPR, part_type, NULL_TREE,
5109 (tree) target),
5110 parts);
5111 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5113 return parts;
5116 /* Subroutine of expand_expr:
5117 record the non-copied parts (LIST) of an expr (LHS), and return a list
5118 which specifies the initial values of these parts. */
5120 static tree
5121 init_noncopied_parts (lhs, list)
5122 tree lhs;
5123 tree list;
5125 tree tail;
5126 tree parts = 0;
5128 for (tail = list; tail; tail = TREE_CHAIN (tail))
5129 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5130 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5131 else if (TREE_PURPOSE (tail))
5133 tree part = TREE_VALUE (tail);
5134 tree part_type = TREE_TYPE (part);
5135 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5136 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5138 return parts;
5141 /* Subroutine of expand_expr: return nonzero iff there is no way that
5142 EXP can reference X, which is being modified. TOP_P is nonzero if this
5143 call is going to be used to determine whether we need a temporary
5144 for EXP, as opposed to a recursive call to this function.
5146 It is always safe for this routine to return zero since it merely
5147 searches for optimization opportunities. */
5149 static int
5150 safe_from_p (x, exp, top_p)
5151 rtx x;
5152 tree exp;
5153 int top_p;
5155 rtx exp_rtl = 0;
5156 int i, nops;
5157 static int save_expr_count;
5158 static int save_expr_size = 0;
5159 static tree *save_expr_rewritten;
5160 static tree save_expr_trees[256];
5162 if (x == 0
5163 /* If EXP has varying size, we MUST use a target since we currently
5164 have no way of allocating temporaries of variable size
5165 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5166 So we assume here that something at a higher level has prevented a
5167 clash. This is somewhat bogus, but the best we can do. Only
5168 do this when X is BLKmode and when we are at the top level. */
5169 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5170 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5171 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5172 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5173 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5174 != INTEGER_CST)
5175 && GET_MODE (x) == BLKmode))
5176 return 1;
5178 if (top_p && save_expr_size == 0)
5180 int rtn;
5182 save_expr_count = 0;
5183 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5184 save_expr_rewritten = &save_expr_trees[0];
5186 rtn = safe_from_p (x, exp, 1);
5188 for (i = 0; i < save_expr_count; ++i)
5190 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5191 abort ();
5192 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5195 save_expr_size = 0;
5197 return rtn;
5200 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5201 find the underlying pseudo. */
5202 if (GET_CODE (x) == SUBREG)
5204 x = SUBREG_REG (x);
5205 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5206 return 0;
5209 /* If X is a location in the outgoing argument area, it is always safe. */
5210 if (GET_CODE (x) == MEM
5211 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5212 || (GET_CODE (XEXP (x, 0)) == PLUS
5213 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5214 return 1;
5216 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5218 case 'd':
5219 exp_rtl = DECL_RTL (exp);
5220 break;
5222 case 'c':
5223 return 1;
5225 case 'x':
5226 if (TREE_CODE (exp) == TREE_LIST)
5227 return ((TREE_VALUE (exp) == 0
5228 || safe_from_p (x, TREE_VALUE (exp), 0))
5229 && (TREE_CHAIN (exp) == 0
5230 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5231 else if (TREE_CODE (exp) == ERROR_MARK)
5232 return 1; /* An already-visited SAVE_EXPR? */
5233 else
5234 return 0;
5236 case '1':
5237 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5239 case '2':
5240 case '<':
5241 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5242 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5244 case 'e':
5245 case 'r':
5246 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5247 the expression. If it is set, we conflict iff we are that rtx or
5248 both are in memory. Otherwise, we check all operands of the
5249 expression recursively. */
5251 switch (TREE_CODE (exp))
5253 case ADDR_EXPR:
5254 return (staticp (TREE_OPERAND (exp, 0))
5255 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5256 || TREE_STATIC (exp));
5258 case INDIRECT_REF:
5259 if (GET_CODE (x) == MEM)
5260 return 0;
5261 break;
5263 case CALL_EXPR:
5264 exp_rtl = CALL_EXPR_RTL (exp);
5265 if (exp_rtl == 0)
5267 /* Assume that the call will clobber all hard registers and
5268 all of memory. */
5269 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5270 || GET_CODE (x) == MEM)
5271 return 0;
5274 break;
5276 case RTL_EXPR:
5277 /* If a sequence exists, we would have to scan every instruction
5278 in the sequence to see if it was safe. This is probably not
5279 worthwhile. */
5280 if (RTL_EXPR_SEQUENCE (exp))
5281 return 0;
5283 exp_rtl = RTL_EXPR_RTL (exp);
5284 break;
5286 case WITH_CLEANUP_EXPR:
5287 exp_rtl = RTL_EXPR_RTL (exp);
5288 break;
5290 case CLEANUP_POINT_EXPR:
5291 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5293 case SAVE_EXPR:
5294 exp_rtl = SAVE_EXPR_RTL (exp);
5295 if (exp_rtl)
5296 break;
5298 /* This SAVE_EXPR might appear many times in the top-level
5299 safe_from_p() expression, and if it has a complex
5300 subexpression, examining it multiple times could result
5301 in a combinatorial explosion. E.g. on an Alpha
5302 running at least 200MHz, a Fortran test case compiled with
5303 optimization took about 28 minutes to compile -- even though
5304 it was only a few lines long, and the complicated line causing
5305 so much time to be spent in the earlier version of safe_from_p()
5306 had only 293 or so unique nodes.
5308 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5309 where it is so we can turn it back in the top-level safe_from_p()
5310 when we're done. */
5312 /* For now, don't bother re-sizing the array. */
5313 if (save_expr_count >= save_expr_size)
5314 return 0;
5315 save_expr_rewritten[save_expr_count++] = exp;
5317 nops = tree_code_length[(int) SAVE_EXPR];
5318 for (i = 0; i < nops; i++)
5320 tree operand = TREE_OPERAND (exp, i);
5321 if (operand == NULL_TREE)
5322 continue;
5323 TREE_SET_CODE (exp, ERROR_MARK);
5324 if (!safe_from_p (x, operand, 0))
5325 return 0;
5326 TREE_SET_CODE (exp, SAVE_EXPR);
5328 TREE_SET_CODE (exp, ERROR_MARK);
5329 return 1;
5331 case BIND_EXPR:
5332 /* The only operand we look at is operand 1. The rest aren't
5333 part of the expression. */
5334 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5336 case METHOD_CALL_EXPR:
5337 /* This takes a rtx argument, but shouldn't appear here. */
5338 abort ();
5340 default:
5341 break;
5344 /* If we have an rtx, we do not need to scan our operands. */
5345 if (exp_rtl)
5346 break;
5348 nops = tree_code_length[(int) TREE_CODE (exp)];
5349 for (i = 0; i < nops; i++)
5350 if (TREE_OPERAND (exp, i) != 0
5351 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5352 return 0;
5355 /* If we have an rtl, find any enclosed object. Then see if we conflict
5356 with it. */
5357 if (exp_rtl)
5359 if (GET_CODE (exp_rtl) == SUBREG)
5361 exp_rtl = SUBREG_REG (exp_rtl);
5362 if (GET_CODE (exp_rtl) == REG
5363 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5364 return 0;
5367 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5368 are memory and EXP is not readonly. */
5369 return ! (rtx_equal_p (x, exp_rtl)
5370 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5371 && ! TREE_READONLY (exp)));
5374 /* If we reach here, it is safe. */
5375 return 1;
5378 /* Subroutine of expand_expr: return nonzero iff EXP is an
5379 expression whose type is statically determinable. */
5381 static int
5382 fixed_type_p (exp)
5383 tree exp;
5385 if (TREE_CODE (exp) == PARM_DECL
5386 || TREE_CODE (exp) == VAR_DECL
5387 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5388 || TREE_CODE (exp) == COMPONENT_REF
5389 || TREE_CODE (exp) == ARRAY_REF)
5390 return 1;
5391 return 0;
5394 /* Subroutine of expand_expr: return rtx if EXP is a
5395 variable or parameter; else return 0. */
5397 static rtx
5398 var_rtx (exp)
5399 tree exp;
5401 STRIP_NOPS (exp);
5402 switch (TREE_CODE (exp))
5404 case PARM_DECL:
5405 case VAR_DECL:
5406 return DECL_RTL (exp);
5407 default:
5408 return 0;
5412 #ifdef MAX_INTEGER_COMPUTATION_MODE
5413 void
5414 check_max_integer_computation_mode (exp)
5415 tree exp;
5417 enum tree_code code;
5418 enum machine_mode mode;
5420 /* Strip any NOPs that don't change the mode. */
5421 STRIP_NOPS (exp);
5422 code = TREE_CODE (exp);
5424 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5425 if (code == NOP_EXPR
5426 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5427 return;
5429 /* First check the type of the overall operation. We need only look at
5430 unary, binary and relational operations. */
5431 if (TREE_CODE_CLASS (code) == '1'
5432 || TREE_CODE_CLASS (code) == '2'
5433 || TREE_CODE_CLASS (code) == '<')
5435 mode = TYPE_MODE (TREE_TYPE (exp));
5436 if (GET_MODE_CLASS (mode) == MODE_INT
5437 && mode > MAX_INTEGER_COMPUTATION_MODE)
5438 fatal ("unsupported wide integer operation");
5441 /* Check operand of a unary op. */
5442 if (TREE_CODE_CLASS (code) == '1')
5444 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5445 if (GET_MODE_CLASS (mode) == MODE_INT
5446 && mode > MAX_INTEGER_COMPUTATION_MODE)
5447 fatal ("unsupported wide integer operation");
5450 /* Check operands of a binary/comparison op. */
5451 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5453 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5454 if (GET_MODE_CLASS (mode) == MODE_INT
5455 && mode > MAX_INTEGER_COMPUTATION_MODE)
5456 fatal ("unsupported wide integer operation");
5458 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5459 if (GET_MODE_CLASS (mode) == MODE_INT
5460 && mode > MAX_INTEGER_COMPUTATION_MODE)
5461 fatal ("unsupported wide integer operation");
5464 #endif
5467 /* expand_expr: generate code for computing expression EXP.
5468 An rtx for the computed value is returned. The value is never null.
5469 In the case of a void EXP, const0_rtx is returned.
5471 The value may be stored in TARGET if TARGET is nonzero.
5472 TARGET is just a suggestion; callers must assume that
5473 the rtx returned may not be the same as TARGET.
5475 If TARGET is CONST0_RTX, it means that the value will be ignored.
5477 If TMODE is not VOIDmode, it suggests generating the
5478 result in mode TMODE. But this is done only when convenient.
5479 Otherwise, TMODE is ignored and the value generated in its natural mode.
5480 TMODE is just a suggestion; callers must assume that
5481 the rtx returned may not have mode TMODE.
5483 Note that TARGET may have neither TMODE nor MODE. In that case, it
5484 probably will not be used.
5486 If MODIFIER is EXPAND_SUM then when EXP is an addition
5487 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5488 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5489 products as above, or REG or MEM, or constant.
5490 Ordinarily in such cases we would output mul or add instructions
5491 and then return a pseudo reg containing the sum.
5493 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5494 it also marks a label as absolutely required (it can't be dead).
5495 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5496 This is used for outputting expressions used in initializers.
5498 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5499 with a constant address even if that address is not normally legitimate.
5500 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5503 expand_expr (exp, target, tmode, modifier)
5504 register tree exp;
5505 rtx target;
5506 enum machine_mode tmode;
5507 enum expand_modifier modifier;
5509 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5510 This is static so it will be accessible to our recursive callees. */
5511 static tree placeholder_list = 0;
5512 register rtx op0, op1, temp;
5513 tree type = TREE_TYPE (exp);
5514 int unsignedp = TREE_UNSIGNED (type);
5515 register enum machine_mode mode;
5516 register enum tree_code code = TREE_CODE (exp);
5517 optab this_optab;
5518 rtx subtarget, original_target;
5519 int ignore;
5520 tree context;
5521 /* Used by check-memory-usage to make modifier read only. */
5522 enum expand_modifier ro_modifier;
5524 /* Handle ERROR_MARK before anybody tries to access its type. */
5525 if (TREE_CODE (exp) == ERROR_MARK)
5527 op0 = CONST0_RTX (tmode);
5528 if (op0 != 0)
5529 return op0;
5530 return const0_rtx;
5533 mode = TYPE_MODE (type);
5534 /* Use subtarget as the target for operand 0 of a binary operation. */
5535 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5536 original_target = target;
5537 ignore = (target == const0_rtx
5538 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5539 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5540 || code == COND_EXPR)
5541 && TREE_CODE (type) == VOID_TYPE));
5543 /* Make a read-only version of the modifier. */
5544 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5545 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5546 ro_modifier = modifier;
5547 else
5548 ro_modifier = EXPAND_NORMAL;
5550 /* Don't use hard regs as subtargets, because the combiner
5551 can only handle pseudo regs. */
5552 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5553 subtarget = 0;
5554 /* Avoid subtargets inside loops,
5555 since they hide some invariant expressions. */
5556 if (preserve_subexpressions_p ())
5557 subtarget = 0;
5559 /* If we are going to ignore this result, we need only do something
5560 if there is a side-effect somewhere in the expression. If there
5561 is, short-circuit the most common cases here. Note that we must
5562 not call expand_expr with anything but const0_rtx in case this
5563 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5565 if (ignore)
5567 if (! TREE_SIDE_EFFECTS (exp))
5568 return const0_rtx;
5570 /* Ensure we reference a volatile object even if value is ignored. */
5571 if (TREE_THIS_VOLATILE (exp)
5572 && TREE_CODE (exp) != FUNCTION_DECL
5573 && mode != VOIDmode && mode != BLKmode)
5575 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5576 if (GET_CODE (temp) == MEM)
5577 temp = copy_to_reg (temp);
5578 return const0_rtx;
5581 if (TREE_CODE_CLASS (code) == '1')
5582 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5583 VOIDmode, ro_modifier);
5584 else if (TREE_CODE_CLASS (code) == '2'
5585 || TREE_CODE_CLASS (code) == '<')
5587 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5588 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5589 return const0_rtx;
5591 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5592 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5593 /* If the second operand has no side effects, just evaluate
5594 the first. */
5595 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5596 VOIDmode, ro_modifier);
5598 target = 0;
5601 #ifdef MAX_INTEGER_COMPUTATION_MODE
5602 /* Only check stuff here if the mode we want is different from the mode
5603 of the expression; if it's the same, check_max_integer_computiation_mode
5604 will handle it. Do we really need to check this stuff at all? */
5606 if (target
5607 && GET_MODE (target) != mode
5608 && TREE_CODE (exp) != INTEGER_CST
5609 && TREE_CODE (exp) != PARM_DECL
5610 && TREE_CODE (exp) != ARRAY_REF
5611 && TREE_CODE (exp) != COMPONENT_REF
5612 && TREE_CODE (exp) != BIT_FIELD_REF
5613 && TREE_CODE (exp) != INDIRECT_REF
5614 && TREE_CODE (exp) != CALL_EXPR
5615 && TREE_CODE (exp) != VAR_DECL
5616 && TREE_CODE (exp) != RTL_EXPR)
5618 enum machine_mode mode = GET_MODE (target);
5620 if (GET_MODE_CLASS (mode) == MODE_INT
5621 && mode > MAX_INTEGER_COMPUTATION_MODE)
5622 fatal ("unsupported wide integer operation");
5625 if (tmode != mode
5626 && TREE_CODE (exp) != INTEGER_CST
5627 && TREE_CODE (exp) != PARM_DECL
5628 && TREE_CODE (exp) != ARRAY_REF
5629 && TREE_CODE (exp) != COMPONENT_REF
5630 && TREE_CODE (exp) != BIT_FIELD_REF
5631 && TREE_CODE (exp) != INDIRECT_REF
5632 && TREE_CODE (exp) != VAR_DECL
5633 && TREE_CODE (exp) != CALL_EXPR
5634 && TREE_CODE (exp) != RTL_EXPR
5635 && GET_MODE_CLASS (tmode) == MODE_INT
5636 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5637 fatal ("unsupported wide integer operation");
5639 check_max_integer_computation_mode (exp);
5640 #endif
5642 /* If will do cse, generate all results into pseudo registers
5643 since 1) that allows cse to find more things
5644 and 2) otherwise cse could produce an insn the machine
5645 cannot support. */
5647 if (! cse_not_expected && mode != BLKmode && target
5648 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5649 target = subtarget;
5651 switch (code)
5653 case LABEL_DECL:
5655 tree function = decl_function_context (exp);
5656 /* Handle using a label in a containing function. */
5657 if (function != current_function_decl
5658 && function != inline_function_decl && function != 0)
5660 struct function *p = find_function_data (function);
5661 /* Allocate in the memory associated with the function
5662 that the label is in. */
5663 push_obstacks (p->function_obstack,
5664 p->function_maybepermanent_obstack);
5666 p->expr->x_forced_labels
5667 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5668 p->expr->x_forced_labels);
5669 pop_obstacks ();
5671 else
5673 if (modifier == EXPAND_INITIALIZER)
5674 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5675 label_rtx (exp),
5676 forced_labels);
5679 temp = gen_rtx_MEM (FUNCTION_MODE,
5680 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5681 if (function != current_function_decl
5682 && function != inline_function_decl && function != 0)
5683 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5684 return temp;
5687 case PARM_DECL:
5688 if (DECL_RTL (exp) == 0)
5690 error_with_decl (exp, "prior parameter's size depends on `%s'");
5691 return CONST0_RTX (mode);
5694 /* ... fall through ... */
5696 case VAR_DECL:
5697 /* If a static var's type was incomplete when the decl was written,
5698 but the type is complete now, lay out the decl now. */
5699 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5700 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5702 push_obstacks_nochange ();
5703 end_temporary_allocation ();
5704 layout_decl (exp, 0);
5705 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5706 pop_obstacks ();
5709 /* Although static-storage variables start off initialized, according to
5710 ANSI C, a memcpy could overwrite them with uninitialized values. So
5711 we check them too. This also lets us check for read-only variables
5712 accessed via a non-const declaration, in case it won't be detected
5713 any other way (e.g., in an embedded system or OS kernel without
5714 memory protection).
5716 Aggregates are not checked here; they're handled elsewhere. */
5717 if (current_function && current_function_check_memory_usage
5718 && code == VAR_DECL
5719 && GET_CODE (DECL_RTL (exp)) == MEM
5720 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5722 enum memory_use_mode memory_usage;
5723 memory_usage = get_memory_usage_from_modifier (modifier);
5725 if (memory_usage != MEMORY_USE_DONT)
5726 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5727 XEXP (DECL_RTL (exp), 0), Pmode,
5728 GEN_INT (int_size_in_bytes (type)),
5729 TYPE_MODE (sizetype),
5730 GEN_INT (memory_usage),
5731 TYPE_MODE (integer_type_node));
5734 /* ... fall through ... */
5736 case FUNCTION_DECL:
5737 case RESULT_DECL:
5738 if (DECL_RTL (exp) == 0)
5739 abort ();
5741 /* Ensure variable marked as used even if it doesn't go through
5742 a parser. If it hasn't be used yet, write out an external
5743 definition. */
5744 if (! TREE_USED (exp))
5746 assemble_external (exp);
5747 TREE_USED (exp) = 1;
5750 /* Show we haven't gotten RTL for this yet. */
5751 temp = 0;
5753 /* Handle variables inherited from containing functions. */
5754 context = decl_function_context (exp);
5756 /* We treat inline_function_decl as an alias for the current function
5757 because that is the inline function whose vars, types, etc.
5758 are being merged into the current function.
5759 See expand_inline_function. */
5761 if (context != 0 && context != current_function_decl
5762 && context != inline_function_decl
5763 /* If var is static, we don't need a static chain to access it. */
5764 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5765 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5767 rtx addr;
5769 /* Mark as non-local and addressable. */
5770 DECL_NONLOCAL (exp) = 1;
5771 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5772 abort ();
5773 mark_addressable (exp);
5774 if (GET_CODE (DECL_RTL (exp)) != MEM)
5775 abort ();
5776 addr = XEXP (DECL_RTL (exp), 0);
5777 if (GET_CODE (addr) == MEM)
5778 addr = gen_rtx_MEM (Pmode,
5779 fix_lexical_addr (XEXP (addr, 0), exp));
5780 else
5781 addr = fix_lexical_addr (addr, exp);
5782 temp = change_address (DECL_RTL (exp), mode, addr);
5785 /* This is the case of an array whose size is to be determined
5786 from its initializer, while the initializer is still being parsed.
5787 See expand_decl. */
5789 else if (GET_CODE (DECL_RTL (exp)) == MEM
5790 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5791 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5792 XEXP (DECL_RTL (exp), 0));
5794 /* If DECL_RTL is memory, we are in the normal case and either
5795 the address is not valid or it is not a register and -fforce-addr
5796 is specified, get the address into a register. */
5798 else if (GET_CODE (DECL_RTL (exp)) == MEM
5799 && modifier != EXPAND_CONST_ADDRESS
5800 && modifier != EXPAND_SUM
5801 && modifier != EXPAND_INITIALIZER
5802 && (! memory_address_p (DECL_MODE (exp),
5803 XEXP (DECL_RTL (exp), 0))
5804 || (flag_force_addr
5805 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5806 temp = change_address (DECL_RTL (exp), VOIDmode,
5807 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5809 /* If we got something, return it. But first, set the alignment
5810 the address is a register. */
5811 if (temp != 0)
5813 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5814 mark_reg_pointer (XEXP (temp, 0),
5815 DECL_ALIGN (exp) / BITS_PER_UNIT);
5817 return temp;
5820 /* If the mode of DECL_RTL does not match that of the decl, it
5821 must be a promoted value. We return a SUBREG of the wanted mode,
5822 but mark it so that we know that it was already extended. */
5824 if (GET_CODE (DECL_RTL (exp)) == REG
5825 && GET_MODE (DECL_RTL (exp)) != mode)
5827 /* Get the signedness used for this variable. Ensure we get the
5828 same mode we got when the variable was declared. */
5829 if (GET_MODE (DECL_RTL (exp))
5830 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5831 abort ();
5833 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5834 SUBREG_PROMOTED_VAR_P (temp) = 1;
5835 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5836 return temp;
5839 return DECL_RTL (exp);
5841 case INTEGER_CST:
5842 return immed_double_const (TREE_INT_CST_LOW (exp),
5843 TREE_INT_CST_HIGH (exp),
5844 mode);
5846 case CONST_DECL:
5847 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5848 EXPAND_MEMORY_USE_BAD);
5850 case REAL_CST:
5851 /* If optimized, generate immediate CONST_DOUBLE
5852 which will be turned into memory by reload if necessary.
5854 We used to force a register so that loop.c could see it. But
5855 this does not allow gen_* patterns to perform optimizations with
5856 the constants. It also produces two insns in cases like "x = 1.0;".
5857 On most machines, floating-point constants are not permitted in
5858 many insns, so we'd end up copying it to a register in any case.
5860 Now, we do the copying in expand_binop, if appropriate. */
5861 return immed_real_const (exp);
5863 case COMPLEX_CST:
5864 case STRING_CST:
5865 if (! TREE_CST_RTL (exp))
5866 output_constant_def (exp);
5868 /* TREE_CST_RTL probably contains a constant address.
5869 On RISC machines where a constant address isn't valid,
5870 make some insns to get that address into a register. */
5871 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5872 && modifier != EXPAND_CONST_ADDRESS
5873 && modifier != EXPAND_INITIALIZER
5874 && modifier != EXPAND_SUM
5875 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5876 || (flag_force_addr
5877 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5878 return change_address (TREE_CST_RTL (exp), VOIDmode,
5879 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5880 return TREE_CST_RTL (exp);
5882 case EXPR_WITH_FILE_LOCATION:
5884 rtx to_return;
5885 char *saved_input_filename = input_filename;
5886 int saved_lineno = lineno;
5887 input_filename = EXPR_WFL_FILENAME (exp);
5888 lineno = EXPR_WFL_LINENO (exp);
5889 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5890 emit_line_note (input_filename, lineno);
5891 /* Possibly avoid switching back and force here */
5892 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5893 input_filename = saved_input_filename;
5894 lineno = saved_lineno;
5895 return to_return;
5898 case SAVE_EXPR:
5899 context = decl_function_context (exp);
5901 /* If this SAVE_EXPR was at global context, assume we are an
5902 initialization function and move it into our context. */
5903 if (context == 0)
5904 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5906 /* We treat inline_function_decl as an alias for the current function
5907 because that is the inline function whose vars, types, etc.
5908 are being merged into the current function.
5909 See expand_inline_function. */
5910 if (context == current_function_decl || context == inline_function_decl)
5911 context = 0;
5913 /* If this is non-local, handle it. */
5914 if (context)
5916 /* The following call just exists to abort if the context is
5917 not of a containing function. */
5918 find_function_data (context);
5920 temp = SAVE_EXPR_RTL (exp);
5921 if (temp && GET_CODE (temp) == REG)
5923 put_var_into_stack (exp);
5924 temp = SAVE_EXPR_RTL (exp);
5926 if (temp == 0 || GET_CODE (temp) != MEM)
5927 abort ();
5928 return change_address (temp, mode,
5929 fix_lexical_addr (XEXP (temp, 0), exp));
5931 if (SAVE_EXPR_RTL (exp) == 0)
5933 if (mode == VOIDmode)
5934 temp = const0_rtx;
5935 else
5936 temp = assign_temp (type, 3, 0, 0);
5938 SAVE_EXPR_RTL (exp) = temp;
5939 if (!optimize && GET_CODE (temp) == REG)
5940 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5941 save_expr_regs);
5943 /* If the mode of TEMP does not match that of the expression, it
5944 must be a promoted value. We pass store_expr a SUBREG of the
5945 wanted mode but mark it so that we know that it was already
5946 extended. Note that `unsignedp' was modified above in
5947 this case. */
5949 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5951 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5952 SUBREG_PROMOTED_VAR_P (temp) = 1;
5953 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5956 if (temp == const0_rtx)
5957 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5958 EXPAND_MEMORY_USE_BAD);
5959 else
5960 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5962 TREE_USED (exp) = 1;
5965 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5966 must be a promoted value. We return a SUBREG of the wanted mode,
5967 but mark it so that we know that it was already extended. */
5969 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5970 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5972 /* Compute the signedness and make the proper SUBREG. */
5973 promote_mode (type, mode, &unsignedp, 0);
5974 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5975 SUBREG_PROMOTED_VAR_P (temp) = 1;
5976 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5977 return temp;
5980 return SAVE_EXPR_RTL (exp);
5982 case UNSAVE_EXPR:
5984 rtx temp;
5985 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5986 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5987 return temp;
5990 case PLACEHOLDER_EXPR:
5992 tree placeholder_expr;
5994 /* If there is an object on the head of the placeholder list,
5995 see if some object in it of type TYPE or a pointer to it. For
5996 further information, see tree.def. */
5997 for (placeholder_expr = placeholder_list;
5998 placeholder_expr != 0;
5999 placeholder_expr = TREE_CHAIN (placeholder_expr))
6001 tree need_type = TYPE_MAIN_VARIANT (type);
6002 tree object = 0;
6003 tree old_list = placeholder_list;
6004 tree elt;
6006 /* Find the outermost reference that is of the type we want.
6007 If none, see if any object has a type that is a pointer to
6008 the type we want. */
6009 for (elt = TREE_PURPOSE (placeholder_expr);
6010 elt != 0 && object == 0;
6012 = ((TREE_CODE (elt) == COMPOUND_EXPR
6013 || TREE_CODE (elt) == COND_EXPR)
6014 ? TREE_OPERAND (elt, 1)
6015 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6016 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6017 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6018 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6019 ? TREE_OPERAND (elt, 0) : 0))
6020 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6021 object = elt;
6023 for (elt = TREE_PURPOSE (placeholder_expr);
6024 elt != 0 && object == 0;
6026 = ((TREE_CODE (elt) == COMPOUND_EXPR
6027 || TREE_CODE (elt) == COND_EXPR)
6028 ? TREE_OPERAND (elt, 1)
6029 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6030 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6031 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6032 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6033 ? TREE_OPERAND (elt, 0) : 0))
6034 if (POINTER_TYPE_P (TREE_TYPE (elt))
6035 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6036 == need_type))
6037 object = build1 (INDIRECT_REF, need_type, elt);
6039 if (object != 0)
6041 /* Expand this object skipping the list entries before
6042 it was found in case it is also a PLACEHOLDER_EXPR.
6043 In that case, we want to translate it using subsequent
6044 entries. */
6045 placeholder_list = TREE_CHAIN (placeholder_expr);
6046 temp = expand_expr (object, original_target, tmode,
6047 ro_modifier);
6048 placeholder_list = old_list;
6049 return temp;
6054 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6055 abort ();
6057 case WITH_RECORD_EXPR:
6058 /* Put the object on the placeholder list, expand our first operand,
6059 and pop the list. */
6060 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6061 placeholder_list);
6062 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6063 tmode, ro_modifier);
6064 placeholder_list = TREE_CHAIN (placeholder_list);
6065 return target;
6067 case GOTO_EXPR:
6068 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6069 expand_goto (TREE_OPERAND (exp, 0));
6070 else
6071 expand_computed_goto (TREE_OPERAND (exp, 0));
6072 return const0_rtx;
6074 case EXIT_EXPR:
6075 expand_exit_loop_if_false (NULL_PTR,
6076 invert_truthvalue (TREE_OPERAND (exp, 0)));
6077 return const0_rtx;
6079 case LABELED_BLOCK_EXPR:
6080 if (LABELED_BLOCK_BODY (exp))
6081 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6082 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6083 return const0_rtx;
6085 case EXIT_BLOCK_EXPR:
6086 if (EXIT_BLOCK_RETURN (exp))
6087 sorry ("returned value in block_exit_expr");
6088 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6089 return const0_rtx;
6091 case LOOP_EXPR:
6092 push_temp_slots ();
6093 expand_start_loop (1);
6094 expand_expr_stmt (TREE_OPERAND (exp, 0));
6095 expand_end_loop ();
6096 pop_temp_slots ();
6098 return const0_rtx;
6100 case BIND_EXPR:
6102 tree vars = TREE_OPERAND (exp, 0);
6103 int vars_need_expansion = 0;
6105 /* Need to open a binding contour here because
6106 if there are any cleanups they must be contained here. */
6107 expand_start_bindings (2);
6109 /* Mark the corresponding BLOCK for output in its proper place. */
6110 if (TREE_OPERAND (exp, 2) != 0
6111 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6112 insert_block (TREE_OPERAND (exp, 2));
6114 /* If VARS have not yet been expanded, expand them now. */
6115 while (vars)
6117 if (DECL_RTL (vars) == 0)
6119 vars_need_expansion = 1;
6120 expand_decl (vars);
6122 expand_decl_init (vars);
6123 vars = TREE_CHAIN (vars);
6126 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6128 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6130 return temp;
6133 case RTL_EXPR:
6134 if (RTL_EXPR_SEQUENCE (exp))
6136 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6137 abort ();
6138 emit_insns (RTL_EXPR_SEQUENCE (exp));
6139 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6141 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6142 free_temps_for_rtl_expr (exp);
6143 return RTL_EXPR_RTL (exp);
6145 case CONSTRUCTOR:
6146 /* If we don't need the result, just ensure we evaluate any
6147 subexpressions. */
6148 if (ignore)
6150 tree elt;
6151 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6152 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6153 EXPAND_MEMORY_USE_BAD);
6154 return const0_rtx;
6157 /* All elts simple constants => refer to a constant in memory. But
6158 if this is a non-BLKmode mode, let it store a field at a time
6159 since that should make a CONST_INT or CONST_DOUBLE when we
6160 fold. Likewise, if we have a target we can use, it is best to
6161 store directly into the target unless the type is large enough
6162 that memcpy will be used. If we are making an initializer and
6163 all operands are constant, put it in memory as well. */
6164 else if ((TREE_STATIC (exp)
6165 && ((mode == BLKmode
6166 && ! (target != 0 && safe_from_p (target, exp, 1)))
6167 || TREE_ADDRESSABLE (exp)
6168 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6169 && (!MOVE_BY_PIECES_P
6170 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6171 TYPE_ALIGN (type) / BITS_PER_UNIT))
6172 && ! mostly_zeros_p (exp))))
6173 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6175 rtx constructor = output_constant_def (exp);
6176 if (modifier != EXPAND_CONST_ADDRESS
6177 && modifier != EXPAND_INITIALIZER
6178 && modifier != EXPAND_SUM
6179 && (! memory_address_p (GET_MODE (constructor),
6180 XEXP (constructor, 0))
6181 || (flag_force_addr
6182 && GET_CODE (XEXP (constructor, 0)) != REG)))
6183 constructor = change_address (constructor, VOIDmode,
6184 XEXP (constructor, 0));
6185 return constructor;
6188 else
6190 /* Handle calls that pass values in multiple non-contiguous
6191 locations. The Irix 6 ABI has examples of this. */
6192 if (target == 0 || ! safe_from_p (target, exp, 1)
6193 || GET_CODE (target) == PARALLEL)
6195 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6196 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6197 else
6198 target = assign_temp (type, 0, 1, 1);
6201 if (TREE_READONLY (exp))
6203 if (GET_CODE (target) == MEM)
6204 target = copy_rtx (target);
6206 RTX_UNCHANGING_P (target) = 1;
6209 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0);
6210 return target;
6213 case INDIRECT_REF:
6215 tree exp1 = TREE_OPERAND (exp, 0);
6216 tree exp2;
6217 tree index;
6218 tree string = string_constant (exp1, &index);
6219 int i;
6221 /* Try to optimize reads from const strings. */
6222 if (string
6223 && TREE_CODE (string) == STRING_CST
6224 && TREE_CODE (index) == INTEGER_CST
6225 && !TREE_INT_CST_HIGH (index)
6226 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6227 && GET_MODE_CLASS (mode) == MODE_INT
6228 && GET_MODE_SIZE (mode) == 1
6229 && modifier != EXPAND_MEMORY_USE_WO)
6230 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6232 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6233 op0 = memory_address (mode, op0);
6235 if (current_function && current_function_check_memory_usage
6236 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6238 enum memory_use_mode memory_usage;
6239 memory_usage = get_memory_usage_from_modifier (modifier);
6241 if (memory_usage != MEMORY_USE_DONT)
6243 in_check_memory_usage = 1;
6244 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6245 op0, Pmode,
6246 GEN_INT (int_size_in_bytes (type)),
6247 TYPE_MODE (sizetype),
6248 GEN_INT (memory_usage),
6249 TYPE_MODE (integer_type_node));
6250 in_check_memory_usage = 0;
6254 temp = gen_rtx_MEM (mode, op0);
6255 /* If address was computed by addition,
6256 mark this as an element of an aggregate. */
6257 if (TREE_CODE (exp1) == PLUS_EXPR
6258 || (TREE_CODE (exp1) == SAVE_EXPR
6259 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6260 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6261 || (TREE_CODE (exp1) == ADDR_EXPR
6262 && (exp2 = TREE_OPERAND (exp1, 0))
6263 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6264 MEM_SET_IN_STRUCT_P (temp, 1);
6266 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6267 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6269 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6270 here, because, in C and C++, the fact that a location is accessed
6271 through a pointer to const does not mean that the value there can
6272 never change. Languages where it can never change should
6273 also set TREE_STATIC. */
6274 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6275 return temp;
6278 case ARRAY_REF:
6279 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6280 abort ();
6283 tree array = TREE_OPERAND (exp, 0);
6284 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6285 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6286 tree index = TREE_OPERAND (exp, 1);
6287 tree index_type = TREE_TYPE (index);
6288 HOST_WIDE_INT i;
6290 /* Optimize the special-case of a zero lower bound.
6292 We convert the low_bound to sizetype to avoid some problems
6293 with constant folding. (E.g. suppose the lower bound is 1,
6294 and its mode is QI. Without the conversion, (ARRAY
6295 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6296 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6298 But sizetype isn't quite right either (especially if
6299 the lowbound is negative). FIXME */
6301 if (! integer_zerop (low_bound))
6302 index = fold (build (MINUS_EXPR, index_type, index,
6303 convert (sizetype, low_bound)));
6305 /* Fold an expression like: "foo"[2].
6306 This is not done in fold so it won't happen inside &.
6307 Don't fold if this is for wide characters since it's too
6308 difficult to do correctly and this is a very rare case. */
6310 if (TREE_CODE (array) == STRING_CST
6311 && TREE_CODE (index) == INTEGER_CST
6312 && !TREE_INT_CST_HIGH (index)
6313 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6314 && GET_MODE_CLASS (mode) == MODE_INT
6315 && GET_MODE_SIZE (mode) == 1)
6316 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6318 /* If this is a constant index into a constant array,
6319 just get the value from the array. Handle both the cases when
6320 we have an explicit constructor and when our operand is a variable
6321 that was declared const. */
6323 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6325 if (TREE_CODE (index) == INTEGER_CST
6326 && TREE_INT_CST_HIGH (index) == 0)
6328 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6330 i = TREE_INT_CST_LOW (index);
6331 while (elem && i--)
6332 elem = TREE_CHAIN (elem);
6333 if (elem)
6334 return expand_expr (fold (TREE_VALUE (elem)), target,
6335 tmode, ro_modifier);
6339 else if (optimize >= 1
6340 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6341 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6342 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6344 if (TREE_CODE (index) == INTEGER_CST)
6346 tree init = DECL_INITIAL (array);
6348 i = TREE_INT_CST_LOW (index);
6349 if (TREE_CODE (init) == CONSTRUCTOR)
6351 tree elem = CONSTRUCTOR_ELTS (init);
6353 while (elem
6354 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6355 elem = TREE_CHAIN (elem);
6356 if (elem)
6357 return expand_expr (fold (TREE_VALUE (elem)), target,
6358 tmode, ro_modifier);
6360 else if (TREE_CODE (init) == STRING_CST
6361 && TREE_INT_CST_HIGH (index) == 0
6362 && (TREE_INT_CST_LOW (index)
6363 < TREE_STRING_LENGTH (init)))
6364 return (GEN_INT
6365 (TREE_STRING_POINTER
6366 (init)[TREE_INT_CST_LOW (index)]));
6371 /* ... fall through ... */
6373 case COMPONENT_REF:
6374 case BIT_FIELD_REF:
6375 /* If the operand is a CONSTRUCTOR, we can just extract the
6376 appropriate field if it is present. Don't do this if we have
6377 already written the data since we want to refer to that copy
6378 and varasm.c assumes that's what we'll do. */
6379 if (code != ARRAY_REF
6380 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6381 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6383 tree elt;
6385 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6386 elt = TREE_CHAIN (elt))
6387 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6388 /* We can normally use the value of the field in the
6389 CONSTRUCTOR. However, if this is a bitfield in
6390 an integral mode that we can fit in a HOST_WIDE_INT,
6391 we must mask only the number of bits in the bitfield,
6392 since this is done implicitly by the constructor. If
6393 the bitfield does not meet either of those conditions,
6394 we can't do this optimization. */
6395 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6396 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6397 == MODE_INT)
6398 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6399 <= HOST_BITS_PER_WIDE_INT))))
6401 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6402 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6404 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6406 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6408 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6409 op0 = expand_and (op0, op1, target);
6411 else
6413 enum machine_mode imode
6414 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6415 tree count
6416 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6419 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6420 target, 0);
6421 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6422 target, 0);
6426 return op0;
6431 enum machine_mode mode1;
6432 int bitsize;
6433 int bitpos;
6434 tree offset;
6435 int volatilep = 0;
6436 int alignment;
6437 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6438 &mode1, &unsignedp, &volatilep,
6439 &alignment);
6441 /* If we got back the original object, something is wrong. Perhaps
6442 we are evaluating an expression too early. In any event, don't
6443 infinitely recurse. */
6444 if (tem == exp)
6445 abort ();
6447 /* If TEM's type is a union of variable size, pass TARGET to the inner
6448 computation, since it will need a temporary and TARGET is known
6449 to have to do. This occurs in unchecked conversion in Ada. */
6451 op0 = expand_expr (tem,
6452 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6453 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6454 != INTEGER_CST)
6455 ? target : NULL_RTX),
6456 VOIDmode,
6457 modifier == EXPAND_INITIALIZER
6458 ? modifier : EXPAND_NORMAL);
6460 /* If this is a constant, put it into a register if it is a
6461 legitimate constant and memory if it isn't. */
6462 if (CONSTANT_P (op0))
6464 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6465 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6466 op0 = force_reg (mode, op0);
6467 else
6468 op0 = validize_mem (force_const_mem (mode, op0));
6471 if (offset != 0)
6473 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6475 if (GET_CODE (op0) != MEM)
6476 abort ();
6478 if (GET_MODE (offset_rtx) != ptr_mode)
6480 #ifdef POINTERS_EXTEND_UNSIGNED
6481 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6482 #else
6483 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6484 #endif
6487 /* A constant address in TO_RTX can have VOIDmode, we must not try
6488 to call force_reg for that case. Avoid that case. */
6489 if (GET_CODE (op0) == MEM
6490 && GET_MODE (op0) == BLKmode
6491 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6492 && bitsize
6493 && (bitpos % bitsize) == 0
6494 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6495 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6497 rtx temp = change_address (op0, mode1,
6498 plus_constant (XEXP (op0, 0),
6499 (bitpos /
6500 BITS_PER_UNIT)));
6501 if (GET_CODE (XEXP (temp, 0)) == REG)
6502 op0 = temp;
6503 else
6504 op0 = change_address (op0, mode1,
6505 force_reg (GET_MODE (XEXP (temp, 0)),
6506 XEXP (temp, 0)));
6507 bitpos = 0;
6511 op0 = change_address (op0, VOIDmode,
6512 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6513 force_reg (ptr_mode,
6514 offset_rtx)));
6517 /* Don't forget about volatility even if this is a bitfield. */
6518 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6520 op0 = copy_rtx (op0);
6521 MEM_VOLATILE_P (op0) = 1;
6524 /* Check the access. */
6525 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6527 enum memory_use_mode memory_usage;
6528 memory_usage = get_memory_usage_from_modifier (modifier);
6530 if (memory_usage != MEMORY_USE_DONT)
6532 rtx to;
6533 int size;
6535 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6536 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6538 /* Check the access right of the pointer. */
6539 if (size > BITS_PER_UNIT)
6540 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6541 to, Pmode,
6542 GEN_INT (size / BITS_PER_UNIT),
6543 TYPE_MODE (sizetype),
6544 GEN_INT (memory_usage),
6545 TYPE_MODE (integer_type_node));
6549 /* In cases where an aligned union has an unaligned object
6550 as a field, we might be extracting a BLKmode value from
6551 an integer-mode (e.g., SImode) object. Handle this case
6552 by doing the extract into an object as wide as the field
6553 (which we know to be the width of a basic mode), then
6554 storing into memory, and changing the mode to BLKmode.
6555 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6556 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6557 if (mode1 == VOIDmode
6558 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6559 || (modifier != EXPAND_CONST_ADDRESS
6560 && modifier != EXPAND_INITIALIZER
6561 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6562 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6563 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6564 /* If the field isn't aligned enough to fetch as a memref,
6565 fetch it as a bit field. */
6566 || (SLOW_UNALIGNED_ACCESS
6567 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6568 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6570 enum machine_mode ext_mode = mode;
6572 if (ext_mode == BLKmode)
6573 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6575 if (ext_mode == BLKmode)
6577 /* In this case, BITPOS must start at a byte boundary and
6578 TARGET, if specified, must be a MEM. */
6579 if (GET_CODE (op0) != MEM
6580 || (target != 0 && GET_CODE (target) != MEM)
6581 || bitpos % BITS_PER_UNIT != 0)
6582 abort ();
6584 op0 = change_address (op0, VOIDmode,
6585 plus_constant (XEXP (op0, 0),
6586 bitpos / BITS_PER_UNIT));
6587 if (target == 0)
6588 target = assign_temp (type, 0, 1, 1);
6590 emit_block_move (target, op0,
6591 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6592 / BITS_PER_UNIT),
6595 return target;
6598 op0 = validize_mem (op0);
6600 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6601 mark_reg_pointer (XEXP (op0, 0), alignment);
6603 op0 = extract_bit_field (op0, bitsize, bitpos,
6604 unsignedp, target, ext_mode, ext_mode,
6605 alignment,
6606 int_size_in_bytes (TREE_TYPE (tem)));
6608 /* If the result is a record type and BITSIZE is narrower than
6609 the mode of OP0, an integral mode, and this is a big endian
6610 machine, we must put the field into the high-order bits. */
6611 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6612 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6613 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6614 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6615 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6616 - bitsize),
6617 op0, 1);
6619 if (mode == BLKmode)
6621 rtx new = assign_stack_temp (ext_mode,
6622 bitsize / BITS_PER_UNIT, 0);
6624 emit_move_insn (new, op0);
6625 op0 = copy_rtx (new);
6626 PUT_MODE (op0, BLKmode);
6627 MEM_SET_IN_STRUCT_P (op0, 1);
6630 return op0;
6633 /* If the result is BLKmode, use that to access the object
6634 now as well. */
6635 if (mode == BLKmode)
6636 mode1 = BLKmode;
6638 /* Get a reference to just this component. */
6639 if (modifier == EXPAND_CONST_ADDRESS
6640 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6641 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6642 (bitpos / BITS_PER_UNIT)));
6643 else
6644 op0 = change_address (op0, mode1,
6645 plus_constant (XEXP (op0, 0),
6646 (bitpos / BITS_PER_UNIT)));
6648 if (GET_CODE (op0) == MEM)
6649 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6651 if (GET_CODE (XEXP (op0, 0)) == REG)
6652 mark_reg_pointer (XEXP (op0, 0), alignment);
6654 MEM_SET_IN_STRUCT_P (op0, 1);
6655 MEM_VOLATILE_P (op0) |= volatilep;
6656 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6657 || modifier == EXPAND_CONST_ADDRESS
6658 || modifier == EXPAND_INITIALIZER)
6659 return op0;
6660 else if (target == 0)
6661 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6663 convert_move (target, op0, unsignedp);
6664 return target;
6667 /* Intended for a reference to a buffer of a file-object in Pascal.
6668 But it's not certain that a special tree code will really be
6669 necessary for these. INDIRECT_REF might work for them. */
6670 case BUFFER_REF:
6671 abort ();
6673 case IN_EXPR:
6675 /* Pascal set IN expression.
6677 Algorithm:
6678 rlo = set_low - (set_low%bits_per_word);
6679 the_word = set [ (index - rlo)/bits_per_word ];
6680 bit_index = index % bits_per_word;
6681 bitmask = 1 << bit_index;
6682 return !!(the_word & bitmask); */
6684 tree set = TREE_OPERAND (exp, 0);
6685 tree index = TREE_OPERAND (exp, 1);
6686 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6687 tree set_type = TREE_TYPE (set);
6688 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6689 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6690 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6691 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6692 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6693 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6694 rtx setaddr = XEXP (setval, 0);
6695 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6696 rtx rlow;
6697 rtx diff, quo, rem, addr, bit, result;
6699 preexpand_calls (exp);
6701 /* If domain is empty, answer is no. Likewise if index is constant
6702 and out of bounds. */
6703 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6704 && TREE_CODE (set_low_bound) == INTEGER_CST
6705 && tree_int_cst_lt (set_high_bound, set_low_bound))
6706 || (TREE_CODE (index) == INTEGER_CST
6707 && TREE_CODE (set_low_bound) == INTEGER_CST
6708 && tree_int_cst_lt (index, set_low_bound))
6709 || (TREE_CODE (set_high_bound) == INTEGER_CST
6710 && TREE_CODE (index) == INTEGER_CST
6711 && tree_int_cst_lt (set_high_bound, index))))
6712 return const0_rtx;
6714 if (target == 0)
6715 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6717 /* If we get here, we have to generate the code for both cases
6718 (in range and out of range). */
6720 op0 = gen_label_rtx ();
6721 op1 = gen_label_rtx ();
6723 if (! (GET_CODE (index_val) == CONST_INT
6724 && GET_CODE (lo_r) == CONST_INT))
6726 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6727 GET_MODE (index_val), iunsignedp, 0, op1);
6730 if (! (GET_CODE (index_val) == CONST_INT
6731 && GET_CODE (hi_r) == CONST_INT))
6733 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6734 GET_MODE (index_val), iunsignedp, 0, op1);
6737 /* Calculate the element number of bit zero in the first word
6738 of the set. */
6739 if (GET_CODE (lo_r) == CONST_INT)
6740 rlow = GEN_INT (INTVAL (lo_r)
6741 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6742 else
6743 rlow = expand_binop (index_mode, and_optab, lo_r,
6744 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6745 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6747 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6748 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6750 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6751 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6752 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6753 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6755 addr = memory_address (byte_mode,
6756 expand_binop (index_mode, add_optab, diff,
6757 setaddr, NULL_RTX, iunsignedp,
6758 OPTAB_LIB_WIDEN));
6760 /* Extract the bit we want to examine */
6761 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6762 gen_rtx_MEM (byte_mode, addr),
6763 make_tree (TREE_TYPE (index), rem),
6764 NULL_RTX, 1);
6765 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6766 GET_MODE (target) == byte_mode ? target : 0,
6767 1, OPTAB_LIB_WIDEN);
6769 if (result != target)
6770 convert_move (target, result, 1);
6772 /* Output the code to handle the out-of-range case. */
6773 emit_jump (op0);
6774 emit_label (op1);
6775 emit_move_insn (target, const0_rtx);
6776 emit_label (op0);
6777 return target;
6780 case WITH_CLEANUP_EXPR:
6781 if (RTL_EXPR_RTL (exp) == 0)
6783 RTL_EXPR_RTL (exp)
6784 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6785 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6787 /* That's it for this cleanup. */
6788 TREE_OPERAND (exp, 2) = 0;
6790 return RTL_EXPR_RTL (exp);
6792 case CLEANUP_POINT_EXPR:
6794 /* Start a new binding layer that will keep track of all cleanup
6795 actions to be performed. */
6796 expand_start_bindings (2);
6798 target_temp_slot_level = temp_slot_level;
6800 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6801 /* If we're going to use this value, load it up now. */
6802 if (! ignore)
6803 op0 = force_not_mem (op0);
6804 preserve_temp_slots (op0);
6805 expand_end_bindings (NULL_TREE, 0, 0);
6807 return op0;
6809 case CALL_EXPR:
6810 /* Check for a built-in function. */
6811 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6812 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6813 == FUNCTION_DECL)
6814 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6815 return expand_builtin (exp, target, subtarget, tmode, ignore);
6817 /* If this call was expanded already by preexpand_calls,
6818 just return the result we got. */
6819 if (CALL_EXPR_RTL (exp) != 0)
6820 return CALL_EXPR_RTL (exp);
6822 return expand_call (exp, target, ignore);
6824 case NON_LVALUE_EXPR:
6825 case NOP_EXPR:
6826 case CONVERT_EXPR:
6827 case REFERENCE_EXPR:
6828 if (TREE_CODE (type) == UNION_TYPE)
6830 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6831 if (target == 0)
6833 if (mode != BLKmode)
6834 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6835 else
6836 target = assign_temp (type, 0, 1, 1);
6839 if (GET_CODE (target) == MEM)
6840 /* Store data into beginning of memory target. */
6841 store_expr (TREE_OPERAND (exp, 0),
6842 change_address (target, TYPE_MODE (valtype), 0), 0);
6844 else if (GET_CODE (target) == REG)
6845 /* Store this field into a union of the proper type. */
6846 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6847 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6848 VOIDmode, 0, 1,
6849 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6851 else
6852 abort ();
6854 /* Return the entire union. */
6855 return target;
6858 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6860 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6861 ro_modifier);
6863 /* If the signedness of the conversion differs and OP0 is
6864 a promoted SUBREG, clear that indication since we now
6865 have to do the proper extension. */
6866 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6867 && GET_CODE (op0) == SUBREG)
6868 SUBREG_PROMOTED_VAR_P (op0) = 0;
6870 return op0;
6873 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6874 if (GET_MODE (op0) == mode)
6875 return op0;
6877 /* If OP0 is a constant, just convert it into the proper mode. */
6878 if (CONSTANT_P (op0))
6879 return
6880 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6881 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6883 if (modifier == EXPAND_INITIALIZER)
6884 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6886 if (target == 0)
6887 return
6888 convert_to_mode (mode, op0,
6889 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6890 else
6891 convert_move (target, op0,
6892 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6893 return target;
6895 case PLUS_EXPR:
6896 /* We come here from MINUS_EXPR when the second operand is a
6897 constant. */
6898 plus_expr:
6899 this_optab = add_optab;
6901 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6902 something else, make sure we add the register to the constant and
6903 then to the other thing. This case can occur during strength
6904 reduction and doing it this way will produce better code if the
6905 frame pointer or argument pointer is eliminated.
6907 fold-const.c will ensure that the constant is always in the inner
6908 PLUS_EXPR, so the only case we need to do anything about is if
6909 sp, ap, or fp is our second argument, in which case we must swap
6910 the innermost first argument and our second argument. */
6912 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6913 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6914 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6915 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6916 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6917 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6919 tree t = TREE_OPERAND (exp, 1);
6921 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6922 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6925 /* If the result is to be ptr_mode and we are adding an integer to
6926 something, we might be forming a constant. So try to use
6927 plus_constant. If it produces a sum and we can't accept it,
6928 use force_operand. This allows P = &ARR[const] to generate
6929 efficient code on machines where a SYMBOL_REF is not a valid
6930 address.
6932 If this is an EXPAND_SUM call, always return the sum. */
6933 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6934 || mode == ptr_mode)
6936 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6937 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6938 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6940 rtx constant_part;
6942 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6943 EXPAND_SUM);
6944 /* Use immed_double_const to ensure that the constant is
6945 truncated according to the mode of OP1, then sign extended
6946 to a HOST_WIDE_INT. Using the constant directly can result
6947 in non-canonical RTL in a 64x32 cross compile. */
6948 constant_part
6949 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
6950 (HOST_WIDE_INT) 0,
6951 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
6952 op1 = plus_constant (op1, INTVAL (constant_part));
6953 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6954 op1 = force_operand (op1, target);
6955 return op1;
6958 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6959 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6960 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6962 rtx constant_part;
6964 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6965 EXPAND_SUM);
6966 if (! CONSTANT_P (op0))
6968 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6969 VOIDmode, modifier);
6970 /* Don't go to both_summands if modifier
6971 says it's not right to return a PLUS. */
6972 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6973 goto binop2;
6974 goto both_summands;
6976 /* Use immed_double_const to ensure that the constant is
6977 truncated according to the mode of OP1, then sign extended
6978 to a HOST_WIDE_INT. Using the constant directly can result
6979 in non-canonical RTL in a 64x32 cross compile. */
6980 constant_part
6981 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
6982 (HOST_WIDE_INT) 0,
6983 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6984 op0 = plus_constant (op0, INTVAL (constant_part));
6985 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6986 op0 = force_operand (op0, target);
6987 return op0;
6991 /* No sense saving up arithmetic to be done
6992 if it's all in the wrong mode to form part of an address.
6993 And force_operand won't know whether to sign-extend or
6994 zero-extend. */
6995 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6996 || mode != ptr_mode)
6997 goto binop;
6999 preexpand_calls (exp);
7000 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7001 subtarget = 0;
7003 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7004 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7006 both_summands:
7007 /* Make sure any term that's a sum with a constant comes last. */
7008 if (GET_CODE (op0) == PLUS
7009 && CONSTANT_P (XEXP (op0, 1)))
7011 temp = op0;
7012 op0 = op1;
7013 op1 = temp;
7015 /* If adding to a sum including a constant,
7016 associate it to put the constant outside. */
7017 if (GET_CODE (op1) == PLUS
7018 && CONSTANT_P (XEXP (op1, 1)))
7020 rtx constant_term = const0_rtx;
7022 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7023 if (temp != 0)
7024 op0 = temp;
7025 /* Ensure that MULT comes first if there is one. */
7026 else if (GET_CODE (op0) == MULT)
7027 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7028 else
7029 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7031 /* Let's also eliminate constants from op0 if possible. */
7032 op0 = eliminate_constant_term (op0, &constant_term);
7034 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7035 their sum should be a constant. Form it into OP1, since the
7036 result we want will then be OP0 + OP1. */
7038 temp = simplify_binary_operation (PLUS, mode, constant_term,
7039 XEXP (op1, 1));
7040 if (temp != 0)
7041 op1 = temp;
7042 else
7043 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7046 /* Put a constant term last and put a multiplication first. */
7047 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7048 temp = op1, op1 = op0, op0 = temp;
7050 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7051 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7053 case MINUS_EXPR:
7054 /* For initializers, we are allowed to return a MINUS of two
7055 symbolic constants. Here we handle all cases when both operands
7056 are constant. */
7057 /* Handle difference of two symbolic constants,
7058 for the sake of an initializer. */
7059 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7060 && really_constant_p (TREE_OPERAND (exp, 0))
7061 && really_constant_p (TREE_OPERAND (exp, 1)))
7063 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7064 VOIDmode, ro_modifier);
7065 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7066 VOIDmode, ro_modifier);
7068 /* If the last operand is a CONST_INT, use plus_constant of
7069 the negated constant. Else make the MINUS. */
7070 if (GET_CODE (op1) == CONST_INT)
7071 return plus_constant (op0, - INTVAL (op1));
7072 else
7073 return gen_rtx_MINUS (mode, op0, op1);
7075 /* Convert A - const to A + (-const). */
7076 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7078 tree negated = fold (build1 (NEGATE_EXPR, type,
7079 TREE_OPERAND (exp, 1)));
7081 /* Deal with the case where we can't negate the constant
7082 in TYPE. */
7083 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7085 tree newtype = signed_type (type);
7086 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7087 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7088 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7090 if (! TREE_OVERFLOW (newneg))
7091 return expand_expr (convert (type,
7092 build (PLUS_EXPR, newtype,
7093 newop0, newneg)),
7094 target, tmode, ro_modifier);
7096 else
7098 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7099 goto plus_expr;
7102 this_optab = sub_optab;
7103 goto binop;
7105 case MULT_EXPR:
7106 preexpand_calls (exp);
7107 /* If first operand is constant, swap them.
7108 Thus the following special case checks need only
7109 check the second operand. */
7110 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7112 register tree t1 = TREE_OPERAND (exp, 0);
7113 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7114 TREE_OPERAND (exp, 1) = t1;
7117 /* Attempt to return something suitable for generating an
7118 indexed address, for machines that support that. */
7120 if (modifier == EXPAND_SUM && mode == ptr_mode
7121 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7122 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7124 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7125 EXPAND_SUM);
7127 /* Apply distributive law if OP0 is x+c. */
7128 if (GET_CODE (op0) == PLUS
7129 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7130 return
7131 gen_rtx_PLUS
7132 (mode,
7133 gen_rtx_MULT
7134 (mode, XEXP (op0, 0),
7135 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7136 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7137 * INTVAL (XEXP (op0, 1))));
7139 if (GET_CODE (op0) != REG)
7140 op0 = force_operand (op0, NULL_RTX);
7141 if (GET_CODE (op0) != REG)
7142 op0 = copy_to_mode_reg (mode, op0);
7144 return
7145 gen_rtx_MULT (mode, op0,
7146 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7149 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7150 subtarget = 0;
7152 /* Check for multiplying things that have been extended
7153 from a narrower type. If this machine supports multiplying
7154 in that narrower type with a result in the desired type,
7155 do it that way, and avoid the explicit type-conversion. */
7156 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7157 && TREE_CODE (type) == INTEGER_TYPE
7158 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7159 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7160 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7161 && int_fits_type_p (TREE_OPERAND (exp, 1),
7162 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7163 /* Don't use a widening multiply if a shift will do. */
7164 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7165 > HOST_BITS_PER_WIDE_INT)
7166 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7168 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7169 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7171 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7172 /* If both operands are extended, they must either both
7173 be zero-extended or both be sign-extended. */
7174 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7176 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7178 enum machine_mode innermode
7179 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7180 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7181 ? smul_widen_optab : umul_widen_optab);
7182 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7183 ? umul_widen_optab : smul_widen_optab);
7184 if (mode == GET_MODE_WIDER_MODE (innermode))
7186 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7188 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7189 NULL_RTX, VOIDmode, 0);
7190 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7191 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7192 VOIDmode, 0);
7193 else
7194 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7195 NULL_RTX, VOIDmode, 0);
7196 goto binop2;
7198 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7199 && innermode == word_mode)
7201 rtx htem;
7202 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7203 NULL_RTX, VOIDmode, 0);
7204 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7205 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7206 VOIDmode, 0);
7207 else
7208 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7209 NULL_RTX, VOIDmode, 0);
7210 temp = expand_binop (mode, other_optab, op0, op1, target,
7211 unsignedp, OPTAB_LIB_WIDEN);
7212 htem = expand_mult_highpart_adjust (innermode,
7213 gen_highpart (innermode, temp),
7214 op0, op1,
7215 gen_highpart (innermode, temp),
7216 unsignedp);
7217 emit_move_insn (gen_highpart (innermode, temp), htem);
7218 return temp;
7222 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7223 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7224 return expand_mult (mode, op0, op1, target, unsignedp);
7226 case TRUNC_DIV_EXPR:
7227 case FLOOR_DIV_EXPR:
7228 case CEIL_DIV_EXPR:
7229 case ROUND_DIV_EXPR:
7230 case EXACT_DIV_EXPR:
7231 preexpand_calls (exp);
7232 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7233 subtarget = 0;
7234 /* Possible optimization: compute the dividend with EXPAND_SUM
7235 then if the divisor is constant can optimize the case
7236 where some terms of the dividend have coeffs divisible by it. */
7237 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7238 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7239 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7241 case RDIV_EXPR:
7242 this_optab = flodiv_optab;
7243 goto binop;
7245 case TRUNC_MOD_EXPR:
7246 case FLOOR_MOD_EXPR:
7247 case CEIL_MOD_EXPR:
7248 case ROUND_MOD_EXPR:
7249 preexpand_calls (exp);
7250 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7251 subtarget = 0;
7252 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7253 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7254 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7256 case FIX_ROUND_EXPR:
7257 case FIX_FLOOR_EXPR:
7258 case FIX_CEIL_EXPR:
7259 abort (); /* Not used for C. */
7261 case FIX_TRUNC_EXPR:
7262 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7263 if (target == 0)
7264 target = gen_reg_rtx (mode);
7265 expand_fix (target, op0, unsignedp);
7266 return target;
7268 case FLOAT_EXPR:
7269 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7270 if (target == 0)
7271 target = gen_reg_rtx (mode);
7272 /* expand_float can't figure out what to do if FROM has VOIDmode.
7273 So give it the correct mode. With -O, cse will optimize this. */
7274 if (GET_MODE (op0) == VOIDmode)
7275 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7276 op0);
7277 expand_float (target, op0,
7278 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7279 return target;
7281 case NEGATE_EXPR:
7282 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7283 temp = expand_unop (mode, neg_optab, op0, target, 0);
7284 if (temp == 0)
7285 abort ();
7286 return temp;
7288 case ABS_EXPR:
7289 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7291 /* Handle complex values specially. */
7292 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7293 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7294 return expand_complex_abs (mode, op0, target, unsignedp);
7296 /* Unsigned abs is simply the operand. Testing here means we don't
7297 risk generating incorrect code below. */
7298 if (TREE_UNSIGNED (type))
7299 return op0;
7301 return expand_abs (mode, op0, target,
7302 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7304 case MAX_EXPR:
7305 case MIN_EXPR:
7306 target = original_target;
7307 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7308 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7309 || GET_MODE (target) != mode
7310 || (GET_CODE (target) == REG
7311 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7312 target = gen_reg_rtx (mode);
7313 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7314 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7316 /* First try to do it with a special MIN or MAX instruction.
7317 If that does not win, use a conditional jump to select the proper
7318 value. */
7319 this_optab = (TREE_UNSIGNED (type)
7320 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7321 : (code == MIN_EXPR ? smin_optab : smax_optab));
7323 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7324 OPTAB_WIDEN);
7325 if (temp != 0)
7326 return temp;
7328 /* At this point, a MEM target is no longer useful; we will get better
7329 code without it. */
7331 if (GET_CODE (target) == MEM)
7332 target = gen_reg_rtx (mode);
7334 if (target != op0)
7335 emit_move_insn (target, op0);
7337 op0 = gen_label_rtx ();
7339 /* If this mode is an integer too wide to compare properly,
7340 compare word by word. Rely on cse to optimize constant cases. */
7341 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode))
7343 if (code == MAX_EXPR)
7344 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7345 target, op1, NULL_RTX, op0);
7346 else
7347 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7348 op1, target, NULL_RTX, op0);
7350 else
7352 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7353 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7354 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7355 op0);
7357 emit_move_insn (target, op1);
7358 emit_label (op0);
7359 return target;
7361 case BIT_NOT_EXPR:
7362 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7363 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7364 if (temp == 0)
7365 abort ();
7366 return temp;
7368 case FFS_EXPR:
7369 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7370 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7371 if (temp == 0)
7372 abort ();
7373 return temp;
7375 /* ??? Can optimize bitwise operations with one arg constant.
7376 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7377 and (a bitwise1 b) bitwise2 b (etc)
7378 but that is probably not worth while. */
7380 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7381 boolean values when we want in all cases to compute both of them. In
7382 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7383 as actual zero-or-1 values and then bitwise anding. In cases where
7384 there cannot be any side effects, better code would be made by
7385 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7386 how to recognize those cases. */
7388 case TRUTH_AND_EXPR:
7389 case BIT_AND_EXPR:
7390 this_optab = and_optab;
7391 goto binop;
7393 case TRUTH_OR_EXPR:
7394 case BIT_IOR_EXPR:
7395 this_optab = ior_optab;
7396 goto binop;
7398 case TRUTH_XOR_EXPR:
7399 case BIT_XOR_EXPR:
7400 this_optab = xor_optab;
7401 goto binop;
7403 case LSHIFT_EXPR:
7404 case RSHIFT_EXPR:
7405 case LROTATE_EXPR:
7406 case RROTATE_EXPR:
7407 preexpand_calls (exp);
7408 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7409 subtarget = 0;
7410 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7411 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7412 unsignedp);
7414 /* Could determine the answer when only additive constants differ. Also,
7415 the addition of one can be handled by changing the condition. */
7416 case LT_EXPR:
7417 case LE_EXPR:
7418 case GT_EXPR:
7419 case GE_EXPR:
7420 case EQ_EXPR:
7421 case NE_EXPR:
7422 preexpand_calls (exp);
7423 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7424 if (temp != 0)
7425 return temp;
7427 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7428 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7429 && original_target
7430 && GET_CODE (original_target) == REG
7431 && (GET_MODE (original_target)
7432 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7434 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7435 VOIDmode, 0);
7437 if (temp != original_target)
7438 temp = copy_to_reg (temp);
7440 op1 = gen_label_rtx ();
7441 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7442 GET_MODE (temp), unsignedp, 0, op1);
7443 emit_move_insn (temp, const1_rtx);
7444 emit_label (op1);
7445 return temp;
7448 /* If no set-flag instruction, must generate a conditional
7449 store into a temporary variable. Drop through
7450 and handle this like && and ||. */
7452 case TRUTH_ANDIF_EXPR:
7453 case TRUTH_ORIF_EXPR:
7454 if (! ignore
7455 && (target == 0 || ! safe_from_p (target, exp, 1)
7456 /* Make sure we don't have a hard reg (such as function's return
7457 value) live across basic blocks, if not optimizing. */
7458 || (!optimize && GET_CODE (target) == REG
7459 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7460 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7462 if (target)
7463 emit_clr_insn (target);
7465 op1 = gen_label_rtx ();
7466 jumpifnot (exp, op1);
7468 if (target)
7469 emit_0_to_1_insn (target);
7471 emit_label (op1);
7472 return ignore ? const0_rtx : target;
7474 case TRUTH_NOT_EXPR:
7475 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7476 /* The parser is careful to generate TRUTH_NOT_EXPR
7477 only with operands that are always zero or one. */
7478 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7479 target, 1, OPTAB_LIB_WIDEN);
7480 if (temp == 0)
7481 abort ();
7482 return temp;
7484 case COMPOUND_EXPR:
7485 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7486 emit_queue ();
7487 return expand_expr (TREE_OPERAND (exp, 1),
7488 (ignore ? const0_rtx : target),
7489 VOIDmode, 0);
7491 case COND_EXPR:
7492 /* If we would have a "singleton" (see below) were it not for a
7493 conversion in each arm, bring that conversion back out. */
7494 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7495 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7496 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7497 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7499 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7500 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7502 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7503 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7504 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7505 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7506 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7507 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7508 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7509 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7510 return expand_expr (build1 (NOP_EXPR, type,
7511 build (COND_EXPR, TREE_TYPE (true),
7512 TREE_OPERAND (exp, 0),
7513 true, false)),
7514 target, tmode, modifier);
7518 /* Note that COND_EXPRs whose type is a structure or union
7519 are required to be constructed to contain assignments of
7520 a temporary variable, so that we can evaluate them here
7521 for side effect only. If type is void, we must do likewise. */
7523 /* If an arm of the branch requires a cleanup,
7524 only that cleanup is performed. */
7526 tree singleton = 0;
7527 tree binary_op = 0, unary_op = 0;
7529 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7530 convert it to our mode, if necessary. */
7531 if (integer_onep (TREE_OPERAND (exp, 1))
7532 && integer_zerop (TREE_OPERAND (exp, 2))
7533 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7535 if (ignore)
7537 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7538 ro_modifier);
7539 return const0_rtx;
7542 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7543 if (GET_MODE (op0) == mode)
7544 return op0;
7546 if (target == 0)
7547 target = gen_reg_rtx (mode);
7548 convert_move (target, op0, unsignedp);
7549 return target;
7552 /* Check for X ? A + B : A. If we have this, we can copy A to the
7553 output and conditionally add B. Similarly for unary operations.
7554 Don't do this if X has side-effects because those side effects
7555 might affect A or B and the "?" operation is a sequence point in
7556 ANSI. (operand_equal_p tests for side effects.) */
7558 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7559 && operand_equal_p (TREE_OPERAND (exp, 2),
7560 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7561 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7562 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7563 && operand_equal_p (TREE_OPERAND (exp, 1),
7564 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7565 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7566 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7567 && operand_equal_p (TREE_OPERAND (exp, 2),
7568 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7569 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7570 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7571 && operand_equal_p (TREE_OPERAND (exp, 1),
7572 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7573 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7575 /* If we are not to produce a result, we have no target. Otherwise,
7576 if a target was specified use it; it will not be used as an
7577 intermediate target unless it is safe. If no target, use a
7578 temporary. */
7580 if (ignore)
7581 temp = 0;
7582 else if (original_target
7583 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7584 || (singleton && GET_CODE (original_target) == REG
7585 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7586 && original_target == var_rtx (singleton)))
7587 && GET_MODE (original_target) == mode
7588 #ifdef HAVE_conditional_move
7589 && (! can_conditionally_move_p (mode)
7590 || GET_CODE (original_target) == REG
7591 || TREE_ADDRESSABLE (type))
7592 #endif
7593 && ! (GET_CODE (original_target) == MEM
7594 && MEM_VOLATILE_P (original_target)))
7595 temp = original_target;
7596 else if (TREE_ADDRESSABLE (type))
7597 abort ();
7598 else
7599 temp = assign_temp (type, 0, 0, 1);
7601 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7602 do the test of X as a store-flag operation, do this as
7603 A + ((X != 0) << log C). Similarly for other simple binary
7604 operators. Only do for C == 1 if BRANCH_COST is low. */
7605 if (temp && singleton && binary_op
7606 && (TREE_CODE (binary_op) == PLUS_EXPR
7607 || TREE_CODE (binary_op) == MINUS_EXPR
7608 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7609 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7610 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7611 : integer_onep (TREE_OPERAND (binary_op, 1)))
7612 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7614 rtx result;
7615 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7616 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7617 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7618 : xor_optab);
7620 /* If we had X ? A : A + 1, do this as A + (X == 0).
7622 We have to invert the truth value here and then put it
7623 back later if do_store_flag fails. We cannot simply copy
7624 TREE_OPERAND (exp, 0) to another variable and modify that
7625 because invert_truthvalue can modify the tree pointed to
7626 by its argument. */
7627 if (singleton == TREE_OPERAND (exp, 1))
7628 TREE_OPERAND (exp, 0)
7629 = invert_truthvalue (TREE_OPERAND (exp, 0));
7631 result = do_store_flag (TREE_OPERAND (exp, 0),
7632 (safe_from_p (temp, singleton, 1)
7633 ? temp : NULL_RTX),
7634 mode, BRANCH_COST <= 1);
7636 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7637 result = expand_shift (LSHIFT_EXPR, mode, result,
7638 build_int_2 (tree_log2
7639 (TREE_OPERAND
7640 (binary_op, 1)),
7642 (safe_from_p (temp, singleton, 1)
7643 ? temp : NULL_RTX), 0);
7645 if (result)
7647 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7648 return expand_binop (mode, boptab, op1, result, temp,
7649 unsignedp, OPTAB_LIB_WIDEN);
7651 else if (singleton == TREE_OPERAND (exp, 1))
7652 TREE_OPERAND (exp, 0)
7653 = invert_truthvalue (TREE_OPERAND (exp, 0));
7656 do_pending_stack_adjust ();
7657 NO_DEFER_POP;
7658 op0 = gen_label_rtx ();
7660 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7662 if (temp != 0)
7664 /* If the target conflicts with the other operand of the
7665 binary op, we can't use it. Also, we can't use the target
7666 if it is a hard register, because evaluating the condition
7667 might clobber it. */
7668 if ((binary_op
7669 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7670 || (GET_CODE (temp) == REG
7671 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7672 temp = gen_reg_rtx (mode);
7673 store_expr (singleton, temp, 0);
7675 else
7676 expand_expr (singleton,
7677 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7678 if (singleton == TREE_OPERAND (exp, 1))
7679 jumpif (TREE_OPERAND (exp, 0), op0);
7680 else
7681 jumpifnot (TREE_OPERAND (exp, 0), op0);
7683 start_cleanup_deferral ();
7684 if (binary_op && temp == 0)
7685 /* Just touch the other operand. */
7686 expand_expr (TREE_OPERAND (binary_op, 1),
7687 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7688 else if (binary_op)
7689 store_expr (build (TREE_CODE (binary_op), type,
7690 make_tree (type, temp),
7691 TREE_OPERAND (binary_op, 1)),
7692 temp, 0);
7693 else
7694 store_expr (build1 (TREE_CODE (unary_op), type,
7695 make_tree (type, temp)),
7696 temp, 0);
7697 op1 = op0;
7699 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7700 comparison operator. If we have one of these cases, set the
7701 output to A, branch on A (cse will merge these two references),
7702 then set the output to FOO. */
7703 else if (temp
7704 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7705 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7706 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7707 TREE_OPERAND (exp, 1), 0)
7708 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7709 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7710 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7712 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7713 temp = gen_reg_rtx (mode);
7714 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7715 jumpif (TREE_OPERAND (exp, 0), op0);
7717 start_cleanup_deferral ();
7718 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7719 op1 = op0;
7721 else if (temp
7722 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7723 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7724 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7725 TREE_OPERAND (exp, 2), 0)
7726 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7727 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7728 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7730 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7731 temp = gen_reg_rtx (mode);
7732 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7733 jumpifnot (TREE_OPERAND (exp, 0), op0);
7735 start_cleanup_deferral ();
7736 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7737 op1 = op0;
7739 else
7741 op1 = gen_label_rtx ();
7742 jumpifnot (TREE_OPERAND (exp, 0), op0);
7744 start_cleanup_deferral ();
7746 /* One branch of the cond can be void, if it never returns. For
7747 example A ? throw : E */
7748 if (temp != 0
7749 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7750 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7751 else
7752 expand_expr (TREE_OPERAND (exp, 1),
7753 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7754 end_cleanup_deferral ();
7755 emit_queue ();
7756 emit_jump_insn (gen_jump (op1));
7757 emit_barrier ();
7758 emit_label (op0);
7759 start_cleanup_deferral ();
7760 if (temp != 0
7761 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7762 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7763 else
7764 expand_expr (TREE_OPERAND (exp, 2),
7765 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7768 end_cleanup_deferral ();
7770 emit_queue ();
7771 emit_label (op1);
7772 OK_DEFER_POP;
7774 return temp;
7777 case TARGET_EXPR:
7779 /* Something needs to be initialized, but we didn't know
7780 where that thing was when building the tree. For example,
7781 it could be the return value of a function, or a parameter
7782 to a function which lays down in the stack, or a temporary
7783 variable which must be passed by reference.
7785 We guarantee that the expression will either be constructed
7786 or copied into our original target. */
7788 tree slot = TREE_OPERAND (exp, 0);
7789 tree cleanups = NULL_TREE;
7790 tree exp1;
7792 if (TREE_CODE (slot) != VAR_DECL)
7793 abort ();
7795 if (! ignore)
7796 target = original_target;
7798 if (target == 0)
7800 if (DECL_RTL (slot) != 0)
7802 target = DECL_RTL (slot);
7803 /* If we have already expanded the slot, so don't do
7804 it again. (mrs) */
7805 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7806 return target;
7808 else
7810 target = assign_temp (type, 2, 0, 1);
7811 /* All temp slots at this level must not conflict. */
7812 preserve_temp_slots (target);
7813 DECL_RTL (slot) = target;
7814 if (TREE_ADDRESSABLE (slot))
7816 TREE_ADDRESSABLE (slot) = 0;
7817 mark_addressable (slot);
7820 /* Since SLOT is not known to the called function
7821 to belong to its stack frame, we must build an explicit
7822 cleanup. This case occurs when we must build up a reference
7823 to pass the reference as an argument. In this case,
7824 it is very likely that such a reference need not be
7825 built here. */
7827 if (TREE_OPERAND (exp, 2) == 0)
7828 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7829 cleanups = TREE_OPERAND (exp, 2);
7832 else
7834 /* This case does occur, when expanding a parameter which
7835 needs to be constructed on the stack. The target
7836 is the actual stack address that we want to initialize.
7837 The function we call will perform the cleanup in this case. */
7839 /* If we have already assigned it space, use that space,
7840 not target that we were passed in, as our target
7841 parameter is only a hint. */
7842 if (DECL_RTL (slot) != 0)
7844 target = DECL_RTL (slot);
7845 /* If we have already expanded the slot, so don't do
7846 it again. (mrs) */
7847 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7848 return target;
7850 else
7852 DECL_RTL (slot) = target;
7853 /* If we must have an addressable slot, then make sure that
7854 the RTL that we just stored in slot is OK. */
7855 if (TREE_ADDRESSABLE (slot))
7857 TREE_ADDRESSABLE (slot) = 0;
7858 mark_addressable (slot);
7863 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7864 /* Mark it as expanded. */
7865 TREE_OPERAND (exp, 1) = NULL_TREE;
7867 TREE_USED (slot) = 1;
7868 store_expr (exp1, target, 0);
7870 expand_decl_cleanup (NULL_TREE, cleanups);
7872 return target;
7875 case INIT_EXPR:
7877 tree lhs = TREE_OPERAND (exp, 0);
7878 tree rhs = TREE_OPERAND (exp, 1);
7879 tree noncopied_parts = 0;
7880 tree lhs_type = TREE_TYPE (lhs);
7882 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7883 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7884 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7885 TYPE_NONCOPIED_PARTS (lhs_type));
7886 while (noncopied_parts != 0)
7888 expand_assignment (TREE_VALUE (noncopied_parts),
7889 TREE_PURPOSE (noncopied_parts), 0, 0);
7890 noncopied_parts = TREE_CHAIN (noncopied_parts);
7892 return temp;
7895 case MODIFY_EXPR:
7897 /* If lhs is complex, expand calls in rhs before computing it.
7898 That's so we don't compute a pointer and save it over a call.
7899 If lhs is simple, compute it first so we can give it as a
7900 target if the rhs is just a call. This avoids an extra temp and copy
7901 and that prevents a partial-subsumption which makes bad code.
7902 Actually we could treat component_ref's of vars like vars. */
7904 tree lhs = TREE_OPERAND (exp, 0);
7905 tree rhs = TREE_OPERAND (exp, 1);
7906 tree noncopied_parts = 0;
7907 tree lhs_type = TREE_TYPE (lhs);
7909 temp = 0;
7911 if (TREE_CODE (lhs) != VAR_DECL
7912 && TREE_CODE (lhs) != RESULT_DECL
7913 && TREE_CODE (lhs) != PARM_DECL
7914 && ! (TREE_CODE (lhs) == INDIRECT_REF
7915 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7916 preexpand_calls (exp);
7918 /* Check for |= or &= of a bitfield of size one into another bitfield
7919 of size 1. In this case, (unless we need the result of the
7920 assignment) we can do this more efficiently with a
7921 test followed by an assignment, if necessary.
7923 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7924 things change so we do, this code should be enhanced to
7925 support it. */
7926 if (ignore
7927 && TREE_CODE (lhs) == COMPONENT_REF
7928 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7929 || TREE_CODE (rhs) == BIT_AND_EXPR)
7930 && TREE_OPERAND (rhs, 0) == lhs
7931 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7932 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7933 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7935 rtx label = gen_label_rtx ();
7937 do_jump (TREE_OPERAND (rhs, 1),
7938 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7939 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7940 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7941 (TREE_CODE (rhs) == BIT_IOR_EXPR
7942 ? integer_one_node
7943 : integer_zero_node)),
7944 0, 0);
7945 do_pending_stack_adjust ();
7946 emit_label (label);
7947 return const0_rtx;
7950 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7951 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7952 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7953 TYPE_NONCOPIED_PARTS (lhs_type));
7955 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7956 while (noncopied_parts != 0)
7958 expand_assignment (TREE_PURPOSE (noncopied_parts),
7959 TREE_VALUE (noncopied_parts), 0, 0);
7960 noncopied_parts = TREE_CHAIN (noncopied_parts);
7962 return temp;
7965 case RETURN_EXPR:
7966 if (!TREE_OPERAND (exp, 0))
7967 expand_null_return ();
7968 else
7969 expand_return (TREE_OPERAND (exp, 0));
7970 return const0_rtx;
7972 case PREINCREMENT_EXPR:
7973 case PREDECREMENT_EXPR:
7974 return expand_increment (exp, 0, ignore);
7976 case POSTINCREMENT_EXPR:
7977 case POSTDECREMENT_EXPR:
7978 /* Faster to treat as pre-increment if result is not used. */
7979 return expand_increment (exp, ! ignore, ignore);
7981 case ADDR_EXPR:
7982 /* If nonzero, TEMP will be set to the address of something that might
7983 be a MEM corresponding to a stack slot. */
7984 temp = 0;
7986 /* Are we taking the address of a nested function? */
7987 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
7988 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7989 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7990 && ! TREE_STATIC (exp))
7992 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7993 op0 = force_operand (op0, target);
7995 /* If we are taking the address of something erroneous, just
7996 return a zero. */
7997 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7998 return const0_rtx;
7999 else
8001 /* We make sure to pass const0_rtx down if we came in with
8002 ignore set, to avoid doing the cleanups twice for something. */
8003 op0 = expand_expr (TREE_OPERAND (exp, 0),
8004 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8005 (modifier == EXPAND_INITIALIZER
8006 ? modifier : EXPAND_CONST_ADDRESS));
8008 /* If we are going to ignore the result, OP0 will have been set
8009 to const0_rtx, so just return it. Don't get confused and
8010 think we are taking the address of the constant. */
8011 if (ignore)
8012 return op0;
8014 op0 = protect_from_queue (op0, 0);
8016 /* We would like the object in memory. If it is a constant, we can
8017 have it be statically allocated into memory. For a non-constant,
8018 we need to allocate some memory and store the value into it. */
8020 if (CONSTANT_P (op0))
8021 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8022 op0);
8023 else if (GET_CODE (op0) == MEM)
8025 mark_temp_addr_taken (op0);
8026 temp = XEXP (op0, 0);
8029 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8030 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8032 /* If this object is in a register, it must be not
8033 be BLKmode. */
8034 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8035 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8037 mark_temp_addr_taken (memloc);
8038 emit_move_insn (memloc, op0);
8039 op0 = memloc;
8042 if (GET_CODE (op0) != MEM)
8043 abort ();
8045 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8047 temp = XEXP (op0, 0);
8048 #ifdef POINTERS_EXTEND_UNSIGNED
8049 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8050 && mode == ptr_mode)
8051 temp = convert_memory_address (ptr_mode, temp);
8052 #endif
8053 return temp;
8056 op0 = force_operand (XEXP (op0, 0), target);
8059 if (flag_force_addr && GET_CODE (op0) != REG)
8060 op0 = force_reg (Pmode, op0);
8062 if (GET_CODE (op0) == REG
8063 && ! REG_USERVAR_P (op0))
8064 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8066 /* If we might have had a temp slot, add an equivalent address
8067 for it. */
8068 if (temp != 0)
8069 update_temp_slot_address (temp, op0);
8071 #ifdef POINTERS_EXTEND_UNSIGNED
8072 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8073 && mode == ptr_mode)
8074 op0 = convert_memory_address (ptr_mode, op0);
8075 #endif
8077 return op0;
8079 case ENTRY_VALUE_EXPR:
8080 abort ();
8082 /* COMPLEX type for Extended Pascal & Fortran */
8083 case COMPLEX_EXPR:
8085 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8086 rtx insns;
8088 /* Get the rtx code of the operands. */
8089 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8090 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8092 if (! target)
8093 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8095 start_sequence ();
8097 /* Move the real (op0) and imaginary (op1) parts to their location. */
8098 emit_move_insn (gen_realpart (mode, target), op0);
8099 emit_move_insn (gen_imagpart (mode, target), op1);
8101 insns = get_insns ();
8102 end_sequence ();
8104 /* Complex construction should appear as a single unit. */
8105 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8106 each with a separate pseudo as destination.
8107 It's not correct for flow to treat them as a unit. */
8108 if (GET_CODE (target) != CONCAT)
8109 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8110 else
8111 emit_insns (insns);
8113 return target;
8116 case REALPART_EXPR:
8117 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8118 return gen_realpart (mode, op0);
8120 case IMAGPART_EXPR:
8121 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8122 return gen_imagpart (mode, op0);
8124 case CONJ_EXPR:
8126 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8127 rtx imag_t;
8128 rtx insns;
8130 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8132 if (! target)
8133 target = gen_reg_rtx (mode);
8135 start_sequence ();
8137 /* Store the realpart and the negated imagpart to target. */
8138 emit_move_insn (gen_realpart (partmode, target),
8139 gen_realpart (partmode, op0));
8141 imag_t = gen_imagpart (partmode, target);
8142 temp = expand_unop (partmode, neg_optab,
8143 gen_imagpart (partmode, op0), imag_t, 0);
8144 if (temp != imag_t)
8145 emit_move_insn (imag_t, temp);
8147 insns = get_insns ();
8148 end_sequence ();
8150 /* Conjugate should appear as a single unit
8151 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8152 each with a separate pseudo as destination.
8153 It's not correct for flow to treat them as a unit. */
8154 if (GET_CODE (target) != CONCAT)
8155 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8156 else
8157 emit_insns (insns);
8159 return target;
8162 case TRY_CATCH_EXPR:
8164 tree handler = TREE_OPERAND (exp, 1);
8166 expand_eh_region_start ();
8168 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8170 expand_eh_region_end (handler);
8172 return op0;
8175 case TRY_FINALLY_EXPR:
8177 tree try_block = TREE_OPERAND (exp, 0);
8178 tree finally_block = TREE_OPERAND (exp, 1);
8179 rtx finally_label = gen_label_rtx ();
8180 rtx done_label = gen_label_rtx ();
8181 rtx return_link = gen_reg_rtx (Pmode);
8182 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8183 (tree) finally_label, (tree) return_link);
8184 TREE_SIDE_EFFECTS (cleanup) = 1;
8186 /* Start a new binding layer that will keep track of all cleanup
8187 actions to be performed. */
8188 expand_start_bindings (2);
8190 target_temp_slot_level = temp_slot_level;
8192 expand_decl_cleanup (NULL_TREE, cleanup);
8193 op0 = expand_expr (try_block, target, tmode, modifier);
8195 preserve_temp_slots (op0);
8196 expand_end_bindings (NULL_TREE, 0, 0);
8197 emit_jump (done_label);
8198 emit_label (finally_label);
8199 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8200 emit_indirect_jump (return_link);
8201 emit_label (done_label);
8202 return op0;
8205 case GOTO_SUBROUTINE_EXPR:
8207 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8208 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8209 rtx return_address = gen_label_rtx ();
8210 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8211 emit_jump (subr);
8212 emit_label (return_address);
8213 return const0_rtx;
8216 case POPDCC_EXPR:
8218 rtx dcc = get_dynamic_cleanup_chain ();
8219 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8220 return const0_rtx;
8223 case POPDHC_EXPR:
8225 rtx dhc = get_dynamic_handler_chain ();
8226 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8227 return const0_rtx;
8230 case VA_ARG_EXPR:
8231 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8233 default:
8234 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8237 /* Here to do an ordinary binary operator, generating an instruction
8238 from the optab already placed in `this_optab'. */
8239 binop:
8240 preexpand_calls (exp);
8241 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8242 subtarget = 0;
8243 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8244 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8245 binop2:
8246 temp = expand_binop (mode, this_optab, op0, op1, target,
8247 unsignedp, OPTAB_LIB_WIDEN);
8248 if (temp == 0)
8249 abort ();
8250 return temp;
8253 /* Return the tree node and offset if a given argument corresponds to
8254 a string constant. */
8256 tree
8257 string_constant (arg, ptr_offset)
8258 tree arg;
8259 tree *ptr_offset;
8261 STRIP_NOPS (arg);
8263 if (TREE_CODE (arg) == ADDR_EXPR
8264 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8266 *ptr_offset = integer_zero_node;
8267 return TREE_OPERAND (arg, 0);
8269 else if (TREE_CODE (arg) == PLUS_EXPR)
8271 tree arg0 = TREE_OPERAND (arg, 0);
8272 tree arg1 = TREE_OPERAND (arg, 1);
8274 STRIP_NOPS (arg0);
8275 STRIP_NOPS (arg1);
8277 if (TREE_CODE (arg0) == ADDR_EXPR
8278 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8280 *ptr_offset = arg1;
8281 return TREE_OPERAND (arg0, 0);
8283 else if (TREE_CODE (arg1) == ADDR_EXPR
8284 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8286 *ptr_offset = arg0;
8287 return TREE_OPERAND (arg1, 0);
8291 return 0;
8294 /* Expand code for a post- or pre- increment or decrement
8295 and return the RTX for the result.
8296 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8298 static rtx
8299 expand_increment (exp, post, ignore)
8300 register tree exp;
8301 int post, ignore;
8303 register rtx op0, op1;
8304 register rtx temp, value;
8305 register tree incremented = TREE_OPERAND (exp, 0);
8306 optab this_optab = add_optab;
8307 int icode;
8308 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8309 int op0_is_copy = 0;
8310 int single_insn = 0;
8311 /* 1 means we can't store into OP0 directly,
8312 because it is a subreg narrower than a word,
8313 and we don't dare clobber the rest of the word. */
8314 int bad_subreg = 0;
8316 /* Stabilize any component ref that might need to be
8317 evaluated more than once below. */
8318 if (!post
8319 || TREE_CODE (incremented) == BIT_FIELD_REF
8320 || (TREE_CODE (incremented) == COMPONENT_REF
8321 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8322 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8323 incremented = stabilize_reference (incremented);
8324 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8325 ones into save exprs so that they don't accidentally get evaluated
8326 more than once by the code below. */
8327 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8328 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8329 incremented = save_expr (incremented);
8331 /* Compute the operands as RTX.
8332 Note whether OP0 is the actual lvalue or a copy of it:
8333 I believe it is a copy iff it is a register or subreg
8334 and insns were generated in computing it. */
8336 temp = get_last_insn ();
8337 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8339 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8340 in place but instead must do sign- or zero-extension during assignment,
8341 so we copy it into a new register and let the code below use it as
8342 a copy.
8344 Note that we can safely modify this SUBREG since it is know not to be
8345 shared (it was made by the expand_expr call above). */
8347 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8349 if (post)
8350 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8351 else
8352 bad_subreg = 1;
8354 else if (GET_CODE (op0) == SUBREG
8355 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8357 /* We cannot increment this SUBREG in place. If we are
8358 post-incrementing, get a copy of the old value. Otherwise,
8359 just mark that we cannot increment in place. */
8360 if (post)
8361 op0 = copy_to_reg (op0);
8362 else
8363 bad_subreg = 1;
8366 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8367 && temp != get_last_insn ());
8368 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8369 EXPAND_MEMORY_USE_BAD);
8371 /* Decide whether incrementing or decrementing. */
8372 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8373 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8374 this_optab = sub_optab;
8376 /* Convert decrement by a constant into a negative increment. */
8377 if (this_optab == sub_optab
8378 && GET_CODE (op1) == CONST_INT)
8380 op1 = GEN_INT (- INTVAL (op1));
8381 this_optab = add_optab;
8384 /* For a preincrement, see if we can do this with a single instruction. */
8385 if (!post)
8387 icode = (int) this_optab->handlers[(int) mode].insn_code;
8388 if (icode != (int) CODE_FOR_nothing
8389 /* Make sure that OP0 is valid for operands 0 and 1
8390 of the insn we want to queue. */
8391 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8392 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8393 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8394 single_insn = 1;
8397 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8398 then we cannot just increment OP0. We must therefore contrive to
8399 increment the original value. Then, for postincrement, we can return
8400 OP0 since it is a copy of the old value. For preincrement, expand here
8401 unless we can do it with a single insn.
8403 Likewise if storing directly into OP0 would clobber high bits
8404 we need to preserve (bad_subreg). */
8405 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8407 /* This is the easiest way to increment the value wherever it is.
8408 Problems with multiple evaluation of INCREMENTED are prevented
8409 because either (1) it is a component_ref or preincrement,
8410 in which case it was stabilized above, or (2) it is an array_ref
8411 with constant index in an array in a register, which is
8412 safe to reevaluate. */
8413 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8414 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8415 ? MINUS_EXPR : PLUS_EXPR),
8416 TREE_TYPE (exp),
8417 incremented,
8418 TREE_OPERAND (exp, 1));
8420 while (TREE_CODE (incremented) == NOP_EXPR
8421 || TREE_CODE (incremented) == CONVERT_EXPR)
8423 newexp = convert (TREE_TYPE (incremented), newexp);
8424 incremented = TREE_OPERAND (incremented, 0);
8427 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8428 return post ? op0 : temp;
8431 if (post)
8433 /* We have a true reference to the value in OP0.
8434 If there is an insn to add or subtract in this mode, queue it.
8435 Queueing the increment insn avoids the register shuffling
8436 that often results if we must increment now and first save
8437 the old value for subsequent use. */
8439 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8440 op0 = stabilize (op0);
8441 #endif
8443 icode = (int) this_optab->handlers[(int) mode].insn_code;
8444 if (icode != (int) CODE_FOR_nothing
8445 /* Make sure that OP0 is valid for operands 0 and 1
8446 of the insn we want to queue. */
8447 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8448 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8450 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8451 op1 = force_reg (mode, op1);
8453 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8455 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8457 rtx addr = (general_operand (XEXP (op0, 0), mode)
8458 ? force_reg (Pmode, XEXP (op0, 0))
8459 : copy_to_reg (XEXP (op0, 0)));
8460 rtx temp, result;
8462 op0 = change_address (op0, VOIDmode, addr);
8463 temp = force_reg (GET_MODE (op0), op0);
8464 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8465 op1 = force_reg (mode, op1);
8467 /* The increment queue is LIFO, thus we have to `queue'
8468 the instructions in reverse order. */
8469 enqueue_insn (op0, gen_move_insn (op0, temp));
8470 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8471 return result;
8475 /* Preincrement, or we can't increment with one simple insn. */
8476 if (post)
8477 /* Save a copy of the value before inc or dec, to return it later. */
8478 temp = value = copy_to_reg (op0);
8479 else
8480 /* Arrange to return the incremented value. */
8481 /* Copy the rtx because expand_binop will protect from the queue,
8482 and the results of that would be invalid for us to return
8483 if our caller does emit_queue before using our result. */
8484 temp = copy_rtx (value = op0);
8486 /* Increment however we can. */
8487 op1 = expand_binop (mode, this_optab, value, op1,
8488 current_function_check_memory_usage ? NULL_RTX : op0,
8489 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8490 /* Make sure the value is stored into OP0. */
8491 if (op1 != op0)
8492 emit_move_insn (op0, op1);
8494 return temp;
8497 /* Expand all function calls contained within EXP, innermost ones first.
8498 But don't look within expressions that have sequence points.
8499 For each CALL_EXPR, record the rtx for its value
8500 in the CALL_EXPR_RTL field. */
8502 static void
8503 preexpand_calls (exp)
8504 tree exp;
8506 register int nops, i;
8507 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8509 if (! do_preexpand_calls)
8510 return;
8512 /* Only expressions and references can contain calls. */
8514 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8515 return;
8517 switch (TREE_CODE (exp))
8519 case CALL_EXPR:
8520 /* Do nothing if already expanded. */
8521 if (CALL_EXPR_RTL (exp) != 0
8522 /* Do nothing if the call returns a variable-sized object. */
8523 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
8524 /* Do nothing to built-in functions. */
8525 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
8526 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8527 == FUNCTION_DECL)
8528 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8529 return;
8531 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8532 return;
8534 case COMPOUND_EXPR:
8535 case COND_EXPR:
8536 case TRUTH_ANDIF_EXPR:
8537 case TRUTH_ORIF_EXPR:
8538 /* If we find one of these, then we can be sure
8539 the adjust will be done for it (since it makes jumps).
8540 Do it now, so that if this is inside an argument
8541 of a function, we don't get the stack adjustment
8542 after some other args have already been pushed. */
8543 do_pending_stack_adjust ();
8544 return;
8546 case BLOCK:
8547 case RTL_EXPR:
8548 case WITH_CLEANUP_EXPR:
8549 case CLEANUP_POINT_EXPR:
8550 case TRY_CATCH_EXPR:
8551 return;
8553 case SAVE_EXPR:
8554 if (SAVE_EXPR_RTL (exp) != 0)
8555 return;
8557 default:
8558 break;
8561 nops = tree_code_length[(int) TREE_CODE (exp)];
8562 for (i = 0; i < nops; i++)
8563 if (TREE_OPERAND (exp, i) != 0)
8565 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
8566 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
8567 It doesn't happen before the call is made. */
8569 else
8571 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8572 if (type == 'e' || type == '<' || type == '1' || type == '2'
8573 || type == 'r')
8574 preexpand_calls (TREE_OPERAND (exp, i));
8579 /* At the start of a function, record that we have no previously-pushed
8580 arguments waiting to be popped. */
8582 void
8583 init_pending_stack_adjust ()
8585 pending_stack_adjust = 0;
8588 /* When exiting from function, if safe, clear out any pending stack adjust
8589 so the adjustment won't get done.
8591 Note, if the current function calls alloca, then it must have a
8592 frame pointer regardless of the value of flag_omit_frame_pointer. */
8594 void
8595 clear_pending_stack_adjust ()
8597 #ifdef EXIT_IGNORE_STACK
8598 if (optimize > 0
8599 && (! flag_omit_frame_pointer || current_function_calls_alloca)
8600 && EXIT_IGNORE_STACK
8601 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8602 && ! flag_inline_functions)
8603 pending_stack_adjust = 0;
8604 #endif
8607 /* Pop any previously-pushed arguments that have not been popped yet. */
8609 void
8610 do_pending_stack_adjust ()
8612 if (inhibit_defer_pop == 0)
8614 if (pending_stack_adjust != 0)
8615 adjust_stack (GEN_INT (pending_stack_adjust));
8616 pending_stack_adjust = 0;
8620 /* Expand conditional expressions. */
8622 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8623 LABEL is an rtx of code CODE_LABEL, in this function and all the
8624 functions here. */
8626 void
8627 jumpifnot (exp, label)
8628 tree exp;
8629 rtx label;
8631 do_jump (exp, label, NULL_RTX);
8634 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8636 void
8637 jumpif (exp, label)
8638 tree exp;
8639 rtx label;
8641 do_jump (exp, NULL_RTX, label);
8644 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8645 the result is zero, or IF_TRUE_LABEL if the result is one.
8646 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8647 meaning fall through in that case.
8649 do_jump always does any pending stack adjust except when it does not
8650 actually perform a jump. An example where there is no jump
8651 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8653 This function is responsible for optimizing cases such as
8654 &&, || and comparison operators in EXP. */
8656 void
8657 do_jump (exp, if_false_label, if_true_label)
8658 tree exp;
8659 rtx if_false_label, if_true_label;
8661 register enum tree_code code = TREE_CODE (exp);
8662 /* Some cases need to create a label to jump to
8663 in order to properly fall through.
8664 These cases set DROP_THROUGH_LABEL nonzero. */
8665 rtx drop_through_label = 0;
8666 rtx temp;
8667 int i;
8668 tree type;
8669 enum machine_mode mode;
8671 #ifdef MAX_INTEGER_COMPUTATION_MODE
8672 check_max_integer_computation_mode (exp);
8673 #endif
8675 emit_queue ();
8677 switch (code)
8679 case ERROR_MARK:
8680 break;
8682 case INTEGER_CST:
8683 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8684 if (temp)
8685 emit_jump (temp);
8686 break;
8688 #if 0
8689 /* This is not true with #pragma weak */
8690 case ADDR_EXPR:
8691 /* The address of something can never be zero. */
8692 if (if_true_label)
8693 emit_jump (if_true_label);
8694 break;
8695 #endif
8697 case NOP_EXPR:
8698 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8699 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8700 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8701 goto normal;
8702 case CONVERT_EXPR:
8703 /* If we are narrowing the operand, we have to do the compare in the
8704 narrower mode. */
8705 if ((TYPE_PRECISION (TREE_TYPE (exp))
8706 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8707 goto normal;
8708 case NON_LVALUE_EXPR:
8709 case REFERENCE_EXPR:
8710 case ABS_EXPR:
8711 case NEGATE_EXPR:
8712 case LROTATE_EXPR:
8713 case RROTATE_EXPR:
8714 /* These cannot change zero->non-zero or vice versa. */
8715 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8716 break;
8718 #if 0
8719 /* This is never less insns than evaluating the PLUS_EXPR followed by
8720 a test and can be longer if the test is eliminated. */
8721 case PLUS_EXPR:
8722 /* Reduce to minus. */
8723 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8724 TREE_OPERAND (exp, 0),
8725 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8726 TREE_OPERAND (exp, 1))));
8727 /* Process as MINUS. */
8728 #endif
8730 case MINUS_EXPR:
8731 /* Non-zero iff operands of minus differ. */
8732 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
8733 TREE_OPERAND (exp, 0),
8734 TREE_OPERAND (exp, 1)),
8735 NE, NE, if_false_label, if_true_label);
8736 break;
8738 case BIT_AND_EXPR:
8739 /* If we are AND'ing with a small constant, do this comparison in the
8740 smallest type that fits. If the machine doesn't have comparisons
8741 that small, it will be converted back to the wider comparison.
8742 This helps if we are testing the sign bit of a narrower object.
8743 combine can't do this for us because it can't know whether a
8744 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8746 if (! SLOW_BYTE_ACCESS
8747 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8748 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8749 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8750 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8751 && (type = type_for_mode (mode, 1)) != 0
8752 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8753 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8754 != CODE_FOR_nothing))
8756 do_jump (convert (type, exp), if_false_label, if_true_label);
8757 break;
8759 goto normal;
8761 case TRUTH_NOT_EXPR:
8762 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8763 break;
8765 case TRUTH_ANDIF_EXPR:
8766 if (if_false_label == 0)
8767 if_false_label = drop_through_label = gen_label_rtx ();
8768 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8769 start_cleanup_deferral ();
8770 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8771 end_cleanup_deferral ();
8772 break;
8774 case TRUTH_ORIF_EXPR:
8775 if (if_true_label == 0)
8776 if_true_label = drop_through_label = gen_label_rtx ();
8777 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8778 start_cleanup_deferral ();
8779 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8780 end_cleanup_deferral ();
8781 break;
8783 case COMPOUND_EXPR:
8784 push_temp_slots ();
8785 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8786 preserve_temp_slots (NULL_RTX);
8787 free_temp_slots ();
8788 pop_temp_slots ();
8789 emit_queue ();
8790 do_pending_stack_adjust ();
8791 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8792 break;
8794 case COMPONENT_REF:
8795 case BIT_FIELD_REF:
8796 case ARRAY_REF:
8798 int bitsize, bitpos, unsignedp;
8799 enum machine_mode mode;
8800 tree type;
8801 tree offset;
8802 int volatilep = 0;
8803 int alignment;
8805 /* Get description of this reference. We don't actually care
8806 about the underlying object here. */
8807 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8808 &mode, &unsignedp, &volatilep,
8809 &alignment);
8811 type = type_for_size (bitsize, unsignedp);
8812 if (! SLOW_BYTE_ACCESS
8813 && type != 0 && bitsize >= 0
8814 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8815 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8816 != CODE_FOR_nothing))
8818 do_jump (convert (type, exp), if_false_label, if_true_label);
8819 break;
8821 goto normal;
8824 case COND_EXPR:
8825 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8826 if (integer_onep (TREE_OPERAND (exp, 1))
8827 && integer_zerop (TREE_OPERAND (exp, 2)))
8828 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8830 else if (integer_zerop (TREE_OPERAND (exp, 1))
8831 && integer_onep (TREE_OPERAND (exp, 2)))
8832 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8834 else
8836 register rtx label1 = gen_label_rtx ();
8837 drop_through_label = gen_label_rtx ();
8839 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8841 start_cleanup_deferral ();
8842 /* Now the THEN-expression. */
8843 do_jump (TREE_OPERAND (exp, 1),
8844 if_false_label ? if_false_label : drop_through_label,
8845 if_true_label ? if_true_label : drop_through_label);
8846 /* In case the do_jump just above never jumps. */
8847 do_pending_stack_adjust ();
8848 emit_label (label1);
8850 /* Now the ELSE-expression. */
8851 do_jump (TREE_OPERAND (exp, 2),
8852 if_false_label ? if_false_label : drop_through_label,
8853 if_true_label ? if_true_label : drop_through_label);
8854 end_cleanup_deferral ();
8856 break;
8858 case EQ_EXPR:
8860 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8862 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8863 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8865 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8866 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8867 do_jump
8868 (fold
8869 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
8870 fold (build (EQ_EXPR, TREE_TYPE (exp),
8871 fold (build1 (REALPART_EXPR,
8872 TREE_TYPE (inner_type),
8873 exp0)),
8874 fold (build1 (REALPART_EXPR,
8875 TREE_TYPE (inner_type),
8876 exp1)))),
8877 fold (build (EQ_EXPR, TREE_TYPE (exp),
8878 fold (build1 (IMAGPART_EXPR,
8879 TREE_TYPE (inner_type),
8880 exp0)),
8881 fold (build1 (IMAGPART_EXPR,
8882 TREE_TYPE (inner_type),
8883 exp1)))))),
8884 if_false_label, if_true_label);
8887 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8888 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8890 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8891 && !can_compare_p (TYPE_MODE (inner_type)))
8892 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8893 else
8894 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
8895 break;
8898 case NE_EXPR:
8900 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8902 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8903 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8905 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8906 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8907 do_jump
8908 (fold
8909 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
8910 fold (build (NE_EXPR, TREE_TYPE (exp),
8911 fold (build1 (REALPART_EXPR,
8912 TREE_TYPE (inner_type),
8913 exp0)),
8914 fold (build1 (REALPART_EXPR,
8915 TREE_TYPE (inner_type),
8916 exp1)))),
8917 fold (build (NE_EXPR, TREE_TYPE (exp),
8918 fold (build1 (IMAGPART_EXPR,
8919 TREE_TYPE (inner_type),
8920 exp0)),
8921 fold (build1 (IMAGPART_EXPR,
8922 TREE_TYPE (inner_type),
8923 exp1)))))),
8924 if_false_label, if_true_label);
8927 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8928 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8930 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8931 && !can_compare_p (TYPE_MODE (inner_type)))
8932 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8933 else
8934 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
8935 break;
8938 case LT_EXPR:
8939 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8940 == MODE_INT)
8941 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8942 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8943 else
8944 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
8945 break;
8947 case LE_EXPR:
8948 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8949 == MODE_INT)
8950 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8951 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8952 else
8953 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
8954 break;
8956 case GT_EXPR:
8957 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8958 == MODE_INT)
8959 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8960 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8961 else
8962 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
8963 break;
8965 case GE_EXPR:
8966 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8967 == MODE_INT)
8968 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8969 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8970 else
8971 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
8972 break;
8974 default:
8975 normal:
8976 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8977 #if 0
8978 /* This is not needed any more and causes poor code since it causes
8979 comparisons and tests from non-SI objects to have different code
8980 sequences. */
8981 /* Copy to register to avoid generating bad insns by cse
8982 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8983 if (!cse_not_expected && GET_CODE (temp) == MEM)
8984 temp = copy_to_reg (temp);
8985 #endif
8986 do_pending_stack_adjust ();
8987 /* Do any postincrements in the expression that was tested. */
8988 emit_queue ();
8990 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
8992 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
8993 if (target)
8994 emit_jump (target);
8996 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8997 && ! can_compare_p (GET_MODE (temp)))
8998 /* Note swapping the labels gives us not-equal. */
8999 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9000 else if (GET_MODE (temp) != VOIDmode)
9001 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9002 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9003 GET_MODE (temp), NULL_RTX, 0,
9004 if_false_label, if_true_label);
9005 else
9006 abort ();
9009 if (drop_through_label)
9011 /* If do_jump produces code that might be jumped around,
9012 do any stack adjusts from that code, before the place
9013 where control merges in. */
9014 do_pending_stack_adjust ();
9015 emit_label (drop_through_label);
9019 /* Given a comparison expression EXP for values too wide to be compared
9020 with one insn, test the comparison and jump to the appropriate label.
9021 The code of EXP is ignored; we always test GT if SWAP is 0,
9022 and LT if SWAP is 1. */
9024 static void
9025 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9026 tree exp;
9027 int swap;
9028 rtx if_false_label, if_true_label;
9030 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9031 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9032 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9033 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9035 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9038 /* Compare OP0 with OP1, word at a time, in mode MODE.
9039 UNSIGNEDP says to do unsigned comparison.
9040 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9042 void
9043 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9044 enum machine_mode mode;
9045 int unsignedp;
9046 rtx op0, op1;
9047 rtx if_false_label, if_true_label;
9049 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9050 rtx drop_through_label = 0;
9051 int i;
9053 if (! if_true_label || ! if_false_label)
9054 drop_through_label = gen_label_rtx ();
9055 if (! if_true_label)
9056 if_true_label = drop_through_label;
9057 if (! if_false_label)
9058 if_false_label = drop_through_label;
9060 /* Compare a word at a time, high order first. */
9061 for (i = 0; i < nwords; i++)
9063 rtx op0_word, op1_word;
9065 if (WORDS_BIG_ENDIAN)
9067 op0_word = operand_subword_force (op0, i, mode);
9068 op1_word = operand_subword_force (op1, i, mode);
9070 else
9072 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9073 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9076 /* All but high-order word must be compared as unsigned. */
9077 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9078 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9079 NULL_RTX, if_true_label);
9081 /* Consider lower words only if these are equal. */
9082 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9083 NULL_RTX, 0, NULL_RTX, if_false_label);
9086 if (if_false_label)
9087 emit_jump (if_false_label);
9088 if (drop_through_label)
9089 emit_label (drop_through_label);
9092 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9093 with one insn, test the comparison and jump to the appropriate label. */
9095 static void
9096 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9097 tree exp;
9098 rtx if_false_label, if_true_label;
9100 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9101 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9102 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9103 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9104 int i;
9105 rtx drop_through_label = 0;
9107 if (! if_false_label)
9108 drop_through_label = if_false_label = gen_label_rtx ();
9110 for (i = 0; i < nwords; i++)
9111 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9112 operand_subword_force (op1, i, mode),
9113 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9114 word_mode, NULL_RTX, 0, if_false_label,
9115 NULL_RTX);
9117 if (if_true_label)
9118 emit_jump (if_true_label);
9119 if (drop_through_label)
9120 emit_label (drop_through_label);
9123 /* Jump according to whether OP0 is 0.
9124 We assume that OP0 has an integer mode that is too wide
9125 for the available compare insns. */
9127 void
9128 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9129 rtx op0;
9130 rtx if_false_label, if_true_label;
9132 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9133 rtx part;
9134 int i;
9135 rtx drop_through_label = 0;
9137 /* The fastest way of doing this comparison on almost any machine is to
9138 "or" all the words and compare the result. If all have to be loaded
9139 from memory and this is a very wide item, it's possible this may
9140 be slower, but that's highly unlikely. */
9142 part = gen_reg_rtx (word_mode);
9143 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9144 for (i = 1; i < nwords && part != 0; i++)
9145 part = expand_binop (word_mode, ior_optab, part,
9146 operand_subword_force (op0, i, GET_MODE (op0)),
9147 part, 1, OPTAB_WIDEN);
9149 if (part != 0)
9151 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9152 NULL_RTX, 0, if_false_label, if_true_label);
9154 return;
9157 /* If we couldn't do the "or" simply, do this with a series of compares. */
9158 if (! if_false_label)
9159 drop_through_label = if_false_label = gen_label_rtx ();
9161 for (i = 0; i < nwords; i++)
9162 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9163 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9164 if_false_label, NULL_RTX);
9166 if (if_true_label)
9167 emit_jump (if_true_label);
9169 if (drop_through_label)
9170 emit_label (drop_through_label);
9173 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9174 (including code to compute the values to be compared)
9175 and set (CC0) according to the result.
9176 The decision as to signed or unsigned comparison must be made by the caller.
9178 We force a stack adjustment unless there are currently
9179 things pushed on the stack that aren't yet used.
9181 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9182 compared.
9184 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9185 size of MODE should be used. */
9188 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9189 register rtx op0, op1;
9190 enum rtx_code code;
9191 int unsignedp;
9192 enum machine_mode mode;
9193 rtx size;
9194 int align;
9196 rtx tem;
9198 /* If one operand is constant, make it the second one. Only do this
9199 if the other operand is not constant as well. */
9201 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9202 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9204 tem = op0;
9205 op0 = op1;
9206 op1 = tem;
9207 code = swap_condition (code);
9210 if (flag_force_mem)
9212 op0 = force_not_mem (op0);
9213 op1 = force_not_mem (op1);
9216 do_pending_stack_adjust ();
9218 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9219 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9220 return tem;
9222 #if 0
9223 /* There's no need to do this now that combine.c can eliminate lots of
9224 sign extensions. This can be less efficient in certain cases on other
9225 machines. */
9227 /* If this is a signed equality comparison, we can do it as an
9228 unsigned comparison since zero-extension is cheaper than sign
9229 extension and comparisons with zero are done as unsigned. This is
9230 the case even on machines that can do fast sign extension, since
9231 zero-extension is easier to combine with other operations than
9232 sign-extension is. If we are comparing against a constant, we must
9233 convert it to what it would look like unsigned. */
9234 if ((code == EQ || code == NE) && ! unsignedp
9235 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9237 if (GET_CODE (op1) == CONST_INT
9238 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9239 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9240 unsignedp = 1;
9242 #endif
9244 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9246 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9249 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9250 The decision as to signed or unsigned comparison must be made by the caller.
9252 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9253 compared.
9255 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9256 size of MODE should be used. */
9258 void
9259 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9260 if_false_label, if_true_label)
9261 register rtx op0, op1;
9262 enum rtx_code code;
9263 int unsignedp;
9264 enum machine_mode mode;
9265 rtx size;
9266 int align;
9267 rtx if_false_label, if_true_label;
9269 rtx tem;
9270 int dummy_true_label = 0;
9272 /* Reverse the comparison if that is safe and we want to jump if it is
9273 false. */
9274 if (! if_true_label && ! FLOAT_MODE_P (mode))
9276 if_true_label = if_false_label;
9277 if_false_label = 0;
9278 code = reverse_condition (code);
9281 /* If one operand is constant, make it the second one. Only do this
9282 if the other operand is not constant as well. */
9284 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9285 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9287 tem = op0;
9288 op0 = op1;
9289 op1 = tem;
9290 code = swap_condition (code);
9293 if (flag_force_mem)
9295 op0 = force_not_mem (op0);
9296 op1 = force_not_mem (op1);
9299 do_pending_stack_adjust ();
9301 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9302 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9304 if (tem == const_true_rtx)
9306 if (if_true_label)
9307 emit_jump (if_true_label);
9309 else
9311 if (if_false_label)
9312 emit_jump (if_false_label);
9314 return;
9317 #if 0
9318 /* There's no need to do this now that combine.c can eliminate lots of
9319 sign extensions. This can be less efficient in certain cases on other
9320 machines. */
9322 /* If this is a signed equality comparison, we can do it as an
9323 unsigned comparison since zero-extension is cheaper than sign
9324 extension and comparisons with zero are done as unsigned. This is
9325 the case even on machines that can do fast sign extension, since
9326 zero-extension is easier to combine with other operations than
9327 sign-extension is. If we are comparing against a constant, we must
9328 convert it to what it would look like unsigned. */
9329 if ((code == EQ || code == NE) && ! unsignedp
9330 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9332 if (GET_CODE (op1) == CONST_INT
9333 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9334 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9335 unsignedp = 1;
9337 #endif
9339 if (! if_true_label)
9341 dummy_true_label = 1;
9342 if_true_label = gen_label_rtx ();
9345 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9346 if_true_label);
9348 if (if_false_label)
9349 emit_jump (if_false_label);
9350 if (dummy_true_label)
9351 emit_label (if_true_label);
9354 /* Generate code for a comparison expression EXP (including code to compute
9355 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9356 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9357 generated code will drop through.
9358 SIGNED_CODE should be the rtx operation for this comparison for
9359 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9361 We force a stack adjustment unless there are currently
9362 things pushed on the stack that aren't yet used. */
9364 static void
9365 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9366 if_true_label)
9367 register tree exp;
9368 enum rtx_code signed_code, unsigned_code;
9369 rtx if_false_label, if_true_label;
9371 register rtx op0, op1;
9372 register tree type;
9373 register enum machine_mode mode;
9374 int unsignedp;
9375 enum rtx_code code;
9377 /* Don't crash if the comparison was erroneous. */
9378 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9379 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9380 return;
9382 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9383 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9384 mode = TYPE_MODE (type);
9385 unsignedp = TREE_UNSIGNED (type);
9386 code = unsignedp ? unsigned_code : signed_code;
9388 #ifdef HAVE_canonicalize_funcptr_for_compare
9389 /* If function pointers need to be "canonicalized" before they can
9390 be reliably compared, then canonicalize them. */
9391 if (HAVE_canonicalize_funcptr_for_compare
9392 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9393 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9394 == FUNCTION_TYPE))
9396 rtx new_op0 = gen_reg_rtx (mode);
9398 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9399 op0 = new_op0;
9402 if (HAVE_canonicalize_funcptr_for_compare
9403 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9404 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9405 == FUNCTION_TYPE))
9407 rtx new_op1 = gen_reg_rtx (mode);
9409 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9410 op1 = new_op1;
9412 #endif
9414 /* Do any postincrements in the expression that was tested. */
9415 emit_queue ();
9417 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9418 ((mode == BLKmode)
9419 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9420 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT,
9421 if_false_label, if_true_label);
9424 /* Generate code to calculate EXP using a store-flag instruction
9425 and return an rtx for the result. EXP is either a comparison
9426 or a TRUTH_NOT_EXPR whose operand is a comparison.
9428 If TARGET is nonzero, store the result there if convenient.
9430 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9431 cheap.
9433 Return zero if there is no suitable set-flag instruction
9434 available on this machine.
9436 Once expand_expr has been called on the arguments of the comparison,
9437 we are committed to doing the store flag, since it is not safe to
9438 re-evaluate the expression. We emit the store-flag insn by calling
9439 emit_store_flag, but only expand the arguments if we have a reason
9440 to believe that emit_store_flag will be successful. If we think that
9441 it will, but it isn't, we have to simulate the store-flag with a
9442 set/jump/set sequence. */
9444 static rtx
9445 do_store_flag (exp, target, mode, only_cheap)
9446 tree exp;
9447 rtx target;
9448 enum machine_mode mode;
9449 int only_cheap;
9451 enum rtx_code code;
9452 tree arg0, arg1, type;
9453 tree tem;
9454 enum machine_mode operand_mode;
9455 int invert = 0;
9456 int unsignedp;
9457 rtx op0, op1;
9458 enum insn_code icode;
9459 rtx subtarget = target;
9460 rtx result, label;
9462 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9463 result at the end. We can't simply invert the test since it would
9464 have already been inverted if it were valid. This case occurs for
9465 some floating-point comparisons. */
9467 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9468 invert = 1, exp = TREE_OPERAND (exp, 0);
9470 arg0 = TREE_OPERAND (exp, 0);
9471 arg1 = TREE_OPERAND (exp, 1);
9472 type = TREE_TYPE (arg0);
9473 operand_mode = TYPE_MODE (type);
9474 unsignedp = TREE_UNSIGNED (type);
9476 /* We won't bother with BLKmode store-flag operations because it would mean
9477 passing a lot of information to emit_store_flag. */
9478 if (operand_mode == BLKmode)
9479 return 0;
9481 /* We won't bother with store-flag operations involving function pointers
9482 when function pointers must be canonicalized before comparisons. */
9483 #ifdef HAVE_canonicalize_funcptr_for_compare
9484 if (HAVE_canonicalize_funcptr_for_compare
9485 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9486 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9487 == FUNCTION_TYPE))
9488 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9489 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9490 == FUNCTION_TYPE))))
9491 return 0;
9492 #endif
9494 STRIP_NOPS (arg0);
9495 STRIP_NOPS (arg1);
9497 /* Get the rtx comparison code to use. We know that EXP is a comparison
9498 operation of some type. Some comparisons against 1 and -1 can be
9499 converted to comparisons with zero. Do so here so that the tests
9500 below will be aware that we have a comparison with zero. These
9501 tests will not catch constants in the first operand, but constants
9502 are rarely passed as the first operand. */
9504 switch (TREE_CODE (exp))
9506 case EQ_EXPR:
9507 code = EQ;
9508 break;
9509 case NE_EXPR:
9510 code = NE;
9511 break;
9512 case LT_EXPR:
9513 if (integer_onep (arg1))
9514 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9515 else
9516 code = unsignedp ? LTU : LT;
9517 break;
9518 case LE_EXPR:
9519 if (! unsignedp && integer_all_onesp (arg1))
9520 arg1 = integer_zero_node, code = LT;
9521 else
9522 code = unsignedp ? LEU : LE;
9523 break;
9524 case GT_EXPR:
9525 if (! unsignedp && integer_all_onesp (arg1))
9526 arg1 = integer_zero_node, code = GE;
9527 else
9528 code = unsignedp ? GTU : GT;
9529 break;
9530 case GE_EXPR:
9531 if (integer_onep (arg1))
9532 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9533 else
9534 code = unsignedp ? GEU : GE;
9535 break;
9536 default:
9537 abort ();
9540 /* Put a constant second. */
9541 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9543 tem = arg0; arg0 = arg1; arg1 = tem;
9544 code = swap_condition (code);
9547 /* If this is an equality or inequality test of a single bit, we can
9548 do this by shifting the bit being tested to the low-order bit and
9549 masking the result with the constant 1. If the condition was EQ,
9550 we xor it with 1. This does not require an scc insn and is faster
9551 than an scc insn even if we have it. */
9553 if ((code == NE || code == EQ)
9554 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9555 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9557 tree inner = TREE_OPERAND (arg0, 0);
9558 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9559 int ops_unsignedp;
9561 /* If INNER is a right shift of a constant and it plus BITNUM does
9562 not overflow, adjust BITNUM and INNER. */
9564 if (TREE_CODE (inner) == RSHIFT_EXPR
9565 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9566 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9567 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9568 < TYPE_PRECISION (type)))
9570 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9571 inner = TREE_OPERAND (inner, 0);
9574 /* If we are going to be able to omit the AND below, we must do our
9575 operations as unsigned. If we must use the AND, we have a choice.
9576 Normally unsigned is faster, but for some machines signed is. */
9577 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9578 #ifdef LOAD_EXTEND_OP
9579 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9580 #else
9582 #endif
9585 if (subtarget == 0 || GET_CODE (subtarget) != REG
9586 || GET_MODE (subtarget) != operand_mode
9587 || ! safe_from_p (subtarget, inner, 1))
9588 subtarget = 0;
9590 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9592 if (bitnum != 0)
9593 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9594 size_int (bitnum), subtarget, ops_unsignedp);
9596 if (GET_MODE (op0) != mode)
9597 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9599 if ((code == EQ && ! invert) || (code == NE && invert))
9600 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9601 ops_unsignedp, OPTAB_LIB_WIDEN);
9603 /* Put the AND last so it can combine with more things. */
9604 if (bitnum != TYPE_PRECISION (type) - 1)
9605 op0 = expand_and (op0, const1_rtx, subtarget);
9607 return op0;
9610 /* Now see if we are likely to be able to do this. Return if not. */
9611 if (! can_compare_p (operand_mode))
9612 return 0;
9613 icode = setcc_gen_code[(int) code];
9614 if (icode == CODE_FOR_nothing
9615 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9617 /* We can only do this if it is one of the special cases that
9618 can be handled without an scc insn. */
9619 if ((code == LT && integer_zerop (arg1))
9620 || (! only_cheap && code == GE && integer_zerop (arg1)))
9622 else if (BRANCH_COST >= 0
9623 && ! only_cheap && (code == NE || code == EQ)
9624 && TREE_CODE (type) != REAL_TYPE
9625 && ((abs_optab->handlers[(int) operand_mode].insn_code
9626 != CODE_FOR_nothing)
9627 || (ffs_optab->handlers[(int) operand_mode].insn_code
9628 != CODE_FOR_nothing)))
9630 else
9631 return 0;
9634 preexpand_calls (exp);
9635 if (subtarget == 0 || GET_CODE (subtarget) != REG
9636 || GET_MODE (subtarget) != operand_mode
9637 || ! safe_from_p (subtarget, arg1, 1))
9638 subtarget = 0;
9640 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9641 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9643 if (target == 0)
9644 target = gen_reg_rtx (mode);
9646 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9647 because, if the emit_store_flag does anything it will succeed and
9648 OP0 and OP1 will not be used subsequently. */
9650 result = emit_store_flag (target, code,
9651 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9652 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9653 operand_mode, unsignedp, 1);
9655 if (result)
9657 if (invert)
9658 result = expand_binop (mode, xor_optab, result, const1_rtx,
9659 result, 0, OPTAB_LIB_WIDEN);
9660 return result;
9663 /* If this failed, we have to do this with set/compare/jump/set code. */
9664 if (GET_CODE (target) != REG
9665 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9666 target = gen_reg_rtx (GET_MODE (target));
9668 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9669 result = compare_from_rtx (op0, op1, code, unsignedp,
9670 operand_mode, NULL_RTX, 0);
9671 if (GET_CODE (result) == CONST_INT)
9672 return (((result == const0_rtx && ! invert)
9673 || (result != const0_rtx && invert))
9674 ? const0_rtx : const1_rtx);
9676 label = gen_label_rtx ();
9677 if (bcc_gen_fctn[(int) code] == 0)
9678 abort ();
9680 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9681 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9682 emit_label (label);
9684 return target;
9687 /* Generate a tablejump instruction (used for switch statements). */
9689 #ifdef HAVE_tablejump
9691 /* INDEX is the value being switched on, with the lowest value
9692 in the table already subtracted.
9693 MODE is its expected mode (needed if INDEX is constant).
9694 RANGE is the length of the jump table.
9695 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9697 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9698 index value is out of range. */
9700 void
9701 do_tablejump (index, mode, range, table_label, default_label)
9702 rtx index, range, table_label, default_label;
9703 enum machine_mode mode;
9705 register rtx temp, vector;
9707 /* Do an unsigned comparison (in the proper mode) between the index
9708 expression and the value which represents the length of the range.
9709 Since we just finished subtracting the lower bound of the range
9710 from the index expression, this comparison allows us to simultaneously
9711 check that the original index expression value is both greater than
9712 or equal to the minimum value of the range and less than or equal to
9713 the maximum value of the range. */
9715 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9716 0, default_label);
9718 /* If index is in range, it must fit in Pmode.
9719 Convert to Pmode so we can index with it. */
9720 if (mode != Pmode)
9721 index = convert_to_mode (Pmode, index, 1);
9723 /* Don't let a MEM slip thru, because then INDEX that comes
9724 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9725 and break_out_memory_refs will go to work on it and mess it up. */
9726 #ifdef PIC_CASE_VECTOR_ADDRESS
9727 if (flag_pic && GET_CODE (index) != REG)
9728 index = copy_to_mode_reg (Pmode, index);
9729 #endif
9731 /* If flag_force_addr were to affect this address
9732 it could interfere with the tricky assumptions made
9733 about addresses that contain label-refs,
9734 which may be valid only very near the tablejump itself. */
9735 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9736 GET_MODE_SIZE, because this indicates how large insns are. The other
9737 uses should all be Pmode, because they are addresses. This code
9738 could fail if addresses and insns are not the same size. */
9739 index = gen_rtx_PLUS (Pmode,
9740 gen_rtx_MULT (Pmode, index,
9741 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9742 gen_rtx_LABEL_REF (Pmode, table_label));
9743 #ifdef PIC_CASE_VECTOR_ADDRESS
9744 if (flag_pic)
9745 index = PIC_CASE_VECTOR_ADDRESS (index);
9746 else
9747 #endif
9748 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9749 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9750 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9751 RTX_UNCHANGING_P (vector) = 1;
9752 convert_move (temp, vector, 0);
9754 emit_jump_insn (gen_tablejump (temp, table_label));
9756 /* If we are generating PIC code or if the table is PC-relative, the
9757 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9758 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9759 emit_barrier ();
9762 #endif /* HAVE_tablejump */