Do not generate error message about unrecognised command line switches of
[official-gcc.git] / gcc / expr.c
blobe246020304529c59972f0802b78743260b5705d8
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "tm_p.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
46 #define CEIL(x,y) (((x) + (y) - 1) / (y))
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
54 #ifdef PUSH_ROUNDING
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first */
58 #endif
60 #endif
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
65 #else
66 #define STACK_PUSH_CODE PRE_INC
67 #endif
68 #endif
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
73 #endif
75 /* If this is nonzero, we do not bother generating VOLATILE
76 around volatile memory references, and we are willing to
77 output indirect addresses. If cse is to follow, we reject
78 indirect addresses so a useful potential cse is generated;
79 if it is used only once, instruction combination will produce
80 the same indirect address eventually. */
81 int cse_not_expected;
83 /* Nonzero to generate code for all the subroutines within an
84 expression before generating the upper levels of the expression.
85 Nowadays this is never zero. */
86 int do_preexpand_calls = 1;
88 /* Don't check memory usage, since code is being emitted to check a memory
89 usage. Used when current_function_check_memory_usage is true, to avoid
90 infinite recursion. */
91 static int in_check_memory_usage;
93 /* This structure is used by move_by_pieces to describe the move to
94 be performed. */
95 struct move_by_pieces
97 rtx to;
98 rtx to_addr;
99 int autinc_to;
100 int explicit_inc_to;
101 int to_struct;
102 int to_readonly;
103 rtx from;
104 rtx from_addr;
105 int autinc_from;
106 int explicit_inc_from;
107 int from_struct;
108 int from_readonly;
109 int len;
110 int offset;
111 int reverse;
114 /* This structure is used by clear_by_pieces to describe the clear to
115 be performed. */
117 struct clear_by_pieces
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 int to_struct;
124 int len;
125 int offset;
126 int reverse;
129 extern struct obstack permanent_obstack;
131 static rtx get_push_address PROTO ((int));
133 static rtx enqueue_insn PROTO((rtx, rtx));
134 static int move_by_pieces_ninsns PROTO((unsigned int, int));
135 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
136 struct move_by_pieces *));
137 static void clear_by_pieces PROTO((rtx, int, int));
138 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...),
139 enum machine_mode,
140 struct clear_by_pieces *));
141 static int is_zeros_p PROTO((tree));
142 static int mostly_zeros_p PROTO((tree));
143 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
144 tree, tree, int, int));
145 static void store_constructor PROTO((tree, rtx, int, int));
146 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
147 enum machine_mode, int, int,
148 int, int));
149 static enum memory_use_mode
150 get_memory_usage_from_modifier PROTO((enum expand_modifier));
151 static tree save_noncopied_parts PROTO((tree, tree));
152 static tree init_noncopied_parts PROTO((tree, tree));
153 static int safe_from_p PROTO((rtx, tree, int));
154 static int fixed_type_p PROTO((tree));
155 static rtx var_rtx PROTO((tree));
156 static rtx expand_increment PROTO((tree, int, int));
157 static void preexpand_calls PROTO((tree));
158 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
159 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
160 static void do_compare_and_jump PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
161 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
170 /* If a memory-to-memory move would take MOVE_RATIO or more simple
171 move-instruction sequences, we will do a movstr or libcall instead. */
173 #ifndef MOVE_RATIO
174 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
175 #define MOVE_RATIO 2
176 #else
177 /* If we are optimizing for space (-Os), cut down the default move ratio */
178 #define MOVE_RATIO (optimize_size ? 3 : 15)
179 #endif
180 #endif
182 /* This macro is used to determine whether move_by_pieces should be called
183 to perform a structure copy. */
184 #ifndef MOVE_BY_PIECES_P
185 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
186 (SIZE, ALIGN) < MOVE_RATIO)
187 #endif
189 /* This array records the insn_code of insns to perform block moves. */
190 enum insn_code movstr_optab[NUM_MACHINE_MODES];
192 /* This array records the insn_code of insns to perform block clears. */
193 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
195 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
197 #ifndef SLOW_UNALIGNED_ACCESS
198 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
199 #endif
201 /* This is run once per compilation to set up which modes can be used
202 directly in memory and to initialize the block move optab. */
204 void
205 init_expr_once ()
207 rtx insn, pat;
208 enum machine_mode mode;
209 int num_clobbers;
210 rtx mem, mem1;
211 char *free_point;
213 start_sequence ();
215 /* Since we are on the permanent obstack, we must be sure we save this
216 spot AFTER we call start_sequence, since it will reuse the rtl it
217 makes. */
218 free_point = (char *) oballoc (0);
220 /* Try indexing by frame ptr and try by stack ptr.
221 It is known that on the Convex the stack ptr isn't a valid index.
222 With luck, one or the other is valid on any machine. */
223 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
224 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
226 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
227 pat = PATTERN (insn);
229 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
230 mode = (enum machine_mode) ((int) mode + 1))
232 int regno;
233 rtx reg;
235 direct_load[(int) mode] = direct_store[(int) mode] = 0;
236 PUT_MODE (mem, mode);
237 PUT_MODE (mem1, mode);
239 /* See if there is some register that can be used in this mode and
240 directly loaded or stored from memory. */
242 if (mode != VOIDmode && mode != BLKmode)
243 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
244 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
245 regno++)
247 if (! HARD_REGNO_MODE_OK (regno, mode))
248 continue;
250 reg = gen_rtx_REG (mode, regno);
252 SET_SRC (pat) = mem;
253 SET_DEST (pat) = reg;
254 if (recog (pat, insn, &num_clobbers) >= 0)
255 direct_load[(int) mode] = 1;
257 SET_SRC (pat) = mem1;
258 SET_DEST (pat) = reg;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_load[(int) mode] = 1;
262 SET_SRC (pat) = reg;
263 SET_DEST (pat) = mem;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_store[(int) mode] = 1;
267 SET_SRC (pat) = reg;
268 SET_DEST (pat) = mem1;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_store[(int) mode] = 1;
274 end_sequence ();
275 obfree (free_point);
278 /* This is run at the start of compiling a function. */
280 void
281 init_expr ()
283 current_function->expr
284 = (struct expr_status *) xmalloc (sizeof (struct expr_status));
286 pending_chain = 0;
287 pending_stack_adjust = 0;
288 inhibit_defer_pop = 0;
289 saveregs_value = 0;
290 apply_args_value = 0;
291 forced_labels = 0;
294 void
295 mark_expr_status (p)
296 struct expr_status *p;
298 if (p == NULL)
299 return;
301 ggc_mark_rtx (p->x_saveregs_value);
302 ggc_mark_rtx (p->x_apply_args_value);
303 ggc_mark_rtx (p->x_forced_labels);
306 void
307 free_expr_status (f)
308 struct function *f;
310 free (f->expr);
311 f->expr = NULL;
314 /* Small sanity check that the queue is empty at the end of a function. */
315 void
316 finish_expr_for_function ()
318 if (pending_chain)
319 abort ();
322 /* Manage the queue of increment instructions to be output
323 for POSTINCREMENT_EXPR expressions, etc. */
325 /* Queue up to increment (or change) VAR later. BODY says how:
326 BODY should be the same thing you would pass to emit_insn
327 to increment right away. It will go to emit_insn later on.
329 The value is a QUEUED expression to be used in place of VAR
330 where you want to guarantee the pre-incrementation value of VAR. */
332 static rtx
333 enqueue_insn (var, body)
334 rtx var, body;
336 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
337 body, pending_chain);
338 return pending_chain;
341 /* Use protect_from_queue to convert a QUEUED expression
342 into something that you can put immediately into an instruction.
343 If the queued incrementation has not happened yet,
344 protect_from_queue returns the variable itself.
345 If the incrementation has happened, protect_from_queue returns a temp
346 that contains a copy of the old value of the variable.
348 Any time an rtx which might possibly be a QUEUED is to be put
349 into an instruction, it must be passed through protect_from_queue first.
350 QUEUED expressions are not meaningful in instructions.
352 Do not pass a value through protect_from_queue and then hold
353 on to it for a while before putting it in an instruction!
354 If the queue is flushed in between, incorrect code will result. */
357 protect_from_queue (x, modify)
358 register rtx x;
359 int modify;
361 register RTX_CODE code = GET_CODE (x);
363 #if 0 /* A QUEUED can hang around after the queue is forced out. */
364 /* Shortcut for most common case. */
365 if (pending_chain == 0)
366 return x;
367 #endif
369 if (code != QUEUED)
371 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
372 use of autoincrement. Make a copy of the contents of the memory
373 location rather than a copy of the address, but not if the value is
374 of mode BLKmode. Don't modify X in place since it might be
375 shared. */
376 if (code == MEM && GET_MODE (x) != BLKmode
377 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
379 register rtx y = XEXP (x, 0);
380 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
382 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
383 MEM_COPY_ATTRIBUTES (new, x);
384 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
386 if (QUEUED_INSN (y))
388 register rtx temp = gen_reg_rtx (GET_MODE (new));
389 emit_insn_before (gen_move_insn (temp, new),
390 QUEUED_INSN (y));
391 return temp;
393 return new;
395 /* Otherwise, recursively protect the subexpressions of all
396 the kinds of rtx's that can contain a QUEUED. */
397 if (code == MEM)
399 rtx tem = protect_from_queue (XEXP (x, 0), 0);
400 if (tem != XEXP (x, 0))
402 x = copy_rtx (x);
403 XEXP (x, 0) = tem;
406 else if (code == PLUS || code == MULT)
408 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
409 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
410 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
412 x = copy_rtx (x);
413 XEXP (x, 0) = new0;
414 XEXP (x, 1) = new1;
417 return x;
419 /* If the increment has not happened, use the variable itself. */
420 if (QUEUED_INSN (x) == 0)
421 return QUEUED_VAR (x);
422 /* If the increment has happened and a pre-increment copy exists,
423 use that copy. */
424 if (QUEUED_COPY (x) != 0)
425 return QUEUED_COPY (x);
426 /* The increment has happened but we haven't set up a pre-increment copy.
427 Set one up now, and use it. */
428 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
429 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
430 QUEUED_INSN (x));
431 return QUEUED_COPY (x);
434 /* Return nonzero if X contains a QUEUED expression:
435 if it contains anything that will be altered by a queued increment.
436 We handle only combinations of MEM, PLUS, MINUS and MULT operators
437 since memory addresses generally contain only those. */
440 queued_subexp_p (x)
441 rtx x;
443 register enum rtx_code code = GET_CODE (x);
444 switch (code)
446 case QUEUED:
447 return 1;
448 case MEM:
449 return queued_subexp_p (XEXP (x, 0));
450 case MULT:
451 case PLUS:
452 case MINUS:
453 return (queued_subexp_p (XEXP (x, 0))
454 || queued_subexp_p (XEXP (x, 1)));
455 default:
456 return 0;
460 /* Perform all the pending incrementations. */
462 void
463 emit_queue ()
465 register rtx p;
466 while ((p = pending_chain))
468 rtx body = QUEUED_BODY (p);
470 if (GET_CODE (body) == SEQUENCE)
472 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
473 emit_insn (QUEUED_BODY (p));
475 else
476 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
477 pending_chain = QUEUED_NEXT (p);
481 /* Copy data from FROM to TO, where the machine modes are not the same.
482 Both modes may be integer, or both may be floating.
483 UNSIGNEDP should be nonzero if FROM is an unsigned type.
484 This causes zero-extension instead of sign-extension. */
486 void
487 convert_move (to, from, unsignedp)
488 register rtx to, from;
489 int unsignedp;
491 enum machine_mode to_mode = GET_MODE (to);
492 enum machine_mode from_mode = GET_MODE (from);
493 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
494 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
495 enum insn_code code;
496 rtx libcall;
498 /* rtx code for making an equivalent value. */
499 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
501 to = protect_from_queue (to, 1);
502 from = protect_from_queue (from, 0);
504 if (to_real != from_real)
505 abort ();
507 /* If FROM is a SUBREG that indicates that we have already done at least
508 the required extension, strip it. We don't handle such SUBREGs as
509 TO here. */
511 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
512 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
513 >= GET_MODE_SIZE (to_mode))
514 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
515 from = gen_lowpart (to_mode, from), from_mode = to_mode;
517 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
518 abort ();
520 if (to_mode == from_mode
521 || (from_mode == VOIDmode && CONSTANT_P (from)))
523 emit_move_insn (to, from);
524 return;
527 if (to_real)
529 rtx value;
531 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
533 /* Try converting directly if the insn is supported. */
534 if ((code = can_extend_p (to_mode, from_mode, 0))
535 != CODE_FOR_nothing)
537 emit_unop_insn (code, to, from, UNKNOWN);
538 return;
542 #ifdef HAVE_trunchfqf2
543 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
545 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
546 return;
548 #endif
549 #ifdef HAVE_trunctqfqf2
550 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
552 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
553 return;
555 #endif
556 #ifdef HAVE_truncsfqf2
557 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
559 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
560 return;
562 #endif
563 #ifdef HAVE_truncdfqf2
564 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
566 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
567 return;
569 #endif
570 #ifdef HAVE_truncxfqf2
571 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
573 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
574 return;
576 #endif
577 #ifdef HAVE_trunctfqf2
578 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
580 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
581 return;
583 #endif
585 #ifdef HAVE_trunctqfhf2
586 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
588 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
589 return;
591 #endif
592 #ifdef HAVE_truncsfhf2
593 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
595 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
596 return;
598 #endif
599 #ifdef HAVE_truncdfhf2
600 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
602 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
603 return;
605 #endif
606 #ifdef HAVE_truncxfhf2
607 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
609 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
610 return;
612 #endif
613 #ifdef HAVE_trunctfhf2
614 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
616 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
617 return;
619 #endif
621 #ifdef HAVE_truncsftqf2
622 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
624 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
625 return;
627 #endif
628 #ifdef HAVE_truncdftqf2
629 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
631 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
632 return;
634 #endif
635 #ifdef HAVE_truncxftqf2
636 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
638 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
639 return;
641 #endif
642 #ifdef HAVE_trunctftqf2
643 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
645 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
646 return;
648 #endif
650 #ifdef HAVE_truncdfsf2
651 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
653 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
654 return;
656 #endif
657 #ifdef HAVE_truncxfsf2
658 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
660 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
661 return;
663 #endif
664 #ifdef HAVE_trunctfsf2
665 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
667 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
668 return;
670 #endif
671 #ifdef HAVE_truncxfdf2
672 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
674 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
675 return;
677 #endif
678 #ifdef HAVE_trunctfdf2
679 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
681 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
682 return;
684 #endif
686 libcall = (rtx) 0;
687 switch (from_mode)
689 case SFmode:
690 switch (to_mode)
692 case DFmode:
693 libcall = extendsfdf2_libfunc;
694 break;
696 case XFmode:
697 libcall = extendsfxf2_libfunc;
698 break;
700 case TFmode:
701 libcall = extendsftf2_libfunc;
702 break;
704 default:
705 break;
707 break;
709 case DFmode:
710 switch (to_mode)
712 case SFmode:
713 libcall = truncdfsf2_libfunc;
714 break;
716 case XFmode:
717 libcall = extenddfxf2_libfunc;
718 break;
720 case TFmode:
721 libcall = extenddftf2_libfunc;
722 break;
724 default:
725 break;
727 break;
729 case XFmode:
730 switch (to_mode)
732 case SFmode:
733 libcall = truncxfsf2_libfunc;
734 break;
736 case DFmode:
737 libcall = truncxfdf2_libfunc;
738 break;
740 default:
741 break;
743 break;
745 case TFmode:
746 switch (to_mode)
748 case SFmode:
749 libcall = trunctfsf2_libfunc;
750 break;
752 case DFmode:
753 libcall = trunctfdf2_libfunc;
754 break;
756 default:
757 break;
759 break;
761 default:
762 break;
765 if (libcall == (rtx) 0)
766 /* This conversion is not implemented yet. */
767 abort ();
769 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
770 1, from, from_mode);
771 emit_move_insn (to, value);
772 return;
775 /* Now both modes are integers. */
777 /* Handle expanding beyond a word. */
778 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
779 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
781 rtx insns;
782 rtx lowpart;
783 rtx fill_value;
784 rtx lowfrom;
785 int i;
786 enum machine_mode lowpart_mode;
787 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
789 /* Try converting directly if the insn is supported. */
790 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
791 != CODE_FOR_nothing)
793 /* If FROM is a SUBREG, put it into a register. Do this
794 so that we always generate the same set of insns for
795 better cse'ing; if an intermediate assignment occurred,
796 we won't be doing the operation directly on the SUBREG. */
797 if (optimize > 0 && GET_CODE (from) == SUBREG)
798 from = force_reg (from_mode, from);
799 emit_unop_insn (code, to, from, equiv_code);
800 return;
802 /* Next, try converting via full word. */
803 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
804 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
805 != CODE_FOR_nothing))
807 if (GET_CODE (to) == REG)
808 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
809 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
810 emit_unop_insn (code, to,
811 gen_lowpart (word_mode, to), equiv_code);
812 return;
815 /* No special multiword conversion insn; do it by hand. */
816 start_sequence ();
818 /* Since we will turn this into a no conflict block, we must ensure
819 that the source does not overlap the target. */
821 if (reg_overlap_mentioned_p (to, from))
822 from = force_reg (from_mode, from);
824 /* Get a copy of FROM widened to a word, if necessary. */
825 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
826 lowpart_mode = word_mode;
827 else
828 lowpart_mode = from_mode;
830 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
832 lowpart = gen_lowpart (lowpart_mode, to);
833 emit_move_insn (lowpart, lowfrom);
835 /* Compute the value to put in each remaining word. */
836 if (unsignedp)
837 fill_value = const0_rtx;
838 else
840 #ifdef HAVE_slt
841 if (HAVE_slt
842 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
843 && STORE_FLAG_VALUE == -1)
845 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
846 lowpart_mode, 0, 0);
847 fill_value = gen_reg_rtx (word_mode);
848 emit_insn (gen_slt (fill_value));
850 else
851 #endif
853 fill_value
854 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
855 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
856 NULL_RTX, 0);
857 fill_value = convert_to_mode (word_mode, fill_value, 1);
861 /* Fill the remaining words. */
862 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
864 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
865 rtx subword = operand_subword (to, index, 1, to_mode);
867 if (subword == 0)
868 abort ();
870 if (fill_value != subword)
871 emit_move_insn (subword, fill_value);
874 insns = get_insns ();
875 end_sequence ();
877 emit_no_conflict_block (insns, to, from, NULL_RTX,
878 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
879 return;
882 /* Truncating multi-word to a word or less. */
883 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
884 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
886 if (!((GET_CODE (from) == MEM
887 && ! MEM_VOLATILE_P (from)
888 && direct_load[(int) to_mode]
889 && ! mode_dependent_address_p (XEXP (from, 0)))
890 || GET_CODE (from) == REG
891 || GET_CODE (from) == SUBREG))
892 from = force_reg (from_mode, from);
893 convert_move (to, gen_lowpart (word_mode, from), 0);
894 return;
897 /* Handle pointer conversion */ /* SPEE 900220 */
898 if (to_mode == PQImode)
900 if (from_mode != QImode)
901 from = convert_to_mode (QImode, from, unsignedp);
903 #ifdef HAVE_truncqipqi2
904 if (HAVE_truncqipqi2)
906 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
907 return;
909 #endif /* HAVE_truncqipqi2 */
910 abort ();
913 if (from_mode == PQImode)
915 if (to_mode != QImode)
917 from = convert_to_mode (QImode, from, unsignedp);
918 from_mode = QImode;
920 else
922 #ifdef HAVE_extendpqiqi2
923 if (HAVE_extendpqiqi2)
925 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
926 return;
928 #endif /* HAVE_extendpqiqi2 */
929 abort ();
933 if (to_mode == PSImode)
935 if (from_mode != SImode)
936 from = convert_to_mode (SImode, from, unsignedp);
938 #ifdef HAVE_truncsipsi2
939 if (HAVE_truncsipsi2)
941 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
942 return;
944 #endif /* HAVE_truncsipsi2 */
945 abort ();
948 if (from_mode == PSImode)
950 if (to_mode != SImode)
952 from = convert_to_mode (SImode, from, unsignedp);
953 from_mode = SImode;
955 else
957 #ifdef HAVE_extendpsisi2
958 if (HAVE_extendpsisi2)
960 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
961 return;
963 #endif /* HAVE_extendpsisi2 */
964 abort ();
968 if (to_mode == PDImode)
970 if (from_mode != DImode)
971 from = convert_to_mode (DImode, from, unsignedp);
973 #ifdef HAVE_truncdipdi2
974 if (HAVE_truncdipdi2)
976 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
977 return;
979 #endif /* HAVE_truncdipdi2 */
980 abort ();
983 if (from_mode == PDImode)
985 if (to_mode != DImode)
987 from = convert_to_mode (DImode, from, unsignedp);
988 from_mode = DImode;
990 else
992 #ifdef HAVE_extendpdidi2
993 if (HAVE_extendpdidi2)
995 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
996 return;
998 #endif /* HAVE_extendpdidi2 */
999 abort ();
1003 /* Now follow all the conversions between integers
1004 no more than a word long. */
1006 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1007 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1008 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1009 GET_MODE_BITSIZE (from_mode)))
1011 if (!((GET_CODE (from) == MEM
1012 && ! MEM_VOLATILE_P (from)
1013 && direct_load[(int) to_mode]
1014 && ! mode_dependent_address_p (XEXP (from, 0)))
1015 || GET_CODE (from) == REG
1016 || GET_CODE (from) == SUBREG))
1017 from = force_reg (from_mode, from);
1018 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1019 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1020 from = copy_to_reg (from);
1021 emit_move_insn (to, gen_lowpart (to_mode, from));
1022 return;
1025 /* Handle extension. */
1026 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1028 /* Convert directly if that works. */
1029 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1030 != CODE_FOR_nothing)
1032 emit_unop_insn (code, to, from, equiv_code);
1033 return;
1035 else
1037 enum machine_mode intermediate;
1038 rtx tmp;
1039 tree shift_amount;
1041 /* Search for a mode to convert via. */
1042 for (intermediate = from_mode; intermediate != VOIDmode;
1043 intermediate = GET_MODE_WIDER_MODE (intermediate))
1044 if (((can_extend_p (to_mode, intermediate, unsignedp)
1045 != CODE_FOR_nothing)
1046 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1047 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1048 GET_MODE_BITSIZE (intermediate))))
1049 && (can_extend_p (intermediate, from_mode, unsignedp)
1050 != CODE_FOR_nothing))
1052 convert_move (to, convert_to_mode (intermediate, from,
1053 unsignedp), unsignedp);
1054 return;
1057 /* No suitable intermediate mode.
1058 Generate what we need with shifts. */
1059 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1060 - GET_MODE_BITSIZE (from_mode), 0);
1061 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1062 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1063 to, unsignedp);
1064 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1065 to, unsignedp);
1066 if (tmp != to)
1067 emit_move_insn (to, tmp);
1068 return;
1072 /* Support special truncate insns for certain modes. */
1074 if (from_mode == DImode && to_mode == SImode)
1076 #ifdef HAVE_truncdisi2
1077 if (HAVE_truncdisi2)
1079 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1080 return;
1082 #endif
1083 convert_move (to, force_reg (from_mode, from), unsignedp);
1084 return;
1087 if (from_mode == DImode && to_mode == HImode)
1089 #ifdef HAVE_truncdihi2
1090 if (HAVE_truncdihi2)
1092 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1093 return;
1095 #endif
1096 convert_move (to, force_reg (from_mode, from), unsignedp);
1097 return;
1100 if (from_mode == DImode && to_mode == QImode)
1102 #ifdef HAVE_truncdiqi2
1103 if (HAVE_truncdiqi2)
1105 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1106 return;
1108 #endif
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1110 return;
1113 if (from_mode == SImode && to_mode == HImode)
1115 #ifdef HAVE_truncsihi2
1116 if (HAVE_truncsihi2)
1118 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1119 return;
1121 #endif
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 return;
1126 if (from_mode == SImode && to_mode == QImode)
1128 #ifdef HAVE_truncsiqi2
1129 if (HAVE_truncsiqi2)
1131 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1132 return;
1134 #endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1139 if (from_mode == HImode && to_mode == QImode)
1141 #ifdef HAVE_trunchiqi2
1142 if (HAVE_trunchiqi2)
1144 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1145 return;
1147 #endif
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 return;
1152 if (from_mode == TImode && to_mode == DImode)
1154 #ifdef HAVE_trunctidi2
1155 if (HAVE_trunctidi2)
1157 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1158 return;
1160 #endif
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 return;
1165 if (from_mode == TImode && to_mode == SImode)
1167 #ifdef HAVE_trunctisi2
1168 if (HAVE_trunctisi2)
1170 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1171 return;
1173 #endif
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 return;
1178 if (from_mode == TImode && to_mode == HImode)
1180 #ifdef HAVE_trunctihi2
1181 if (HAVE_trunctihi2)
1183 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1184 return;
1186 #endif
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 return;
1191 if (from_mode == TImode && to_mode == QImode)
1193 #ifdef HAVE_trunctiqi2
1194 if (HAVE_trunctiqi2)
1196 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1197 return;
1199 #endif
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 return;
1204 /* Handle truncation of volatile memrefs, and so on;
1205 the things that couldn't be truncated directly,
1206 and for which there was no special instruction. */
1207 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1209 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1210 emit_move_insn (to, temp);
1211 return;
1214 /* Mode combination is not recognized. */
1215 abort ();
1218 /* Return an rtx for a value that would result
1219 from converting X to mode MODE.
1220 Both X and MODE may be floating, or both integer.
1221 UNSIGNEDP is nonzero if X is an unsigned value.
1222 This can be done by referring to a part of X in place
1223 or by copying to a new temporary with conversion.
1225 This function *must not* call protect_from_queue
1226 except when putting X into an insn (in which case convert_move does it). */
1229 convert_to_mode (mode, x, unsignedp)
1230 enum machine_mode mode;
1231 rtx x;
1232 int unsignedp;
1234 return convert_modes (mode, VOIDmode, x, unsignedp);
1237 /* Return an rtx for a value that would result
1238 from converting X from mode OLDMODE to mode MODE.
1239 Both modes may be floating, or both integer.
1240 UNSIGNEDP is nonzero if X is an unsigned value.
1242 This can be done by referring to a part of X in place
1243 or by copying to a new temporary with conversion.
1245 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1247 This function *must not* call protect_from_queue
1248 except when putting X into an insn (in which case convert_move does it). */
1251 convert_modes (mode, oldmode, x, unsignedp)
1252 enum machine_mode mode, oldmode;
1253 rtx x;
1254 int unsignedp;
1256 register rtx temp;
1258 /* If FROM is a SUBREG that indicates that we have already done at least
1259 the required extension, strip it. */
1261 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1262 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1263 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1264 x = gen_lowpart (mode, x);
1266 if (GET_MODE (x) != VOIDmode)
1267 oldmode = GET_MODE (x);
1269 if (mode == oldmode)
1270 return x;
1272 /* There is one case that we must handle specially: If we are converting
1273 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1274 we are to interpret the constant as unsigned, gen_lowpart will do
1275 the wrong if the constant appears negative. What we want to do is
1276 make the high-order word of the constant zero, not all ones. */
1278 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1279 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1280 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1282 HOST_WIDE_INT val = INTVAL (x);
1284 if (oldmode != VOIDmode
1285 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1287 int width = GET_MODE_BITSIZE (oldmode);
1289 /* We need to zero extend VAL. */
1290 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1293 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1296 /* We can do this with a gen_lowpart if both desired and current modes
1297 are integer, and this is either a constant integer, a register, or a
1298 non-volatile MEM. Except for the constant case where MODE is no
1299 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1301 if ((GET_CODE (x) == CONST_INT
1302 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1303 || (GET_MODE_CLASS (mode) == MODE_INT
1304 && GET_MODE_CLASS (oldmode) == MODE_INT
1305 && (GET_CODE (x) == CONST_DOUBLE
1306 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1307 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1308 && direct_load[(int) mode])
1309 || (GET_CODE (x) == REG
1310 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1311 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1313 /* ?? If we don't know OLDMODE, we have to assume here that
1314 X does not need sign- or zero-extension. This may not be
1315 the case, but it's the best we can do. */
1316 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1317 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1319 HOST_WIDE_INT val = INTVAL (x);
1320 int width = GET_MODE_BITSIZE (oldmode);
1322 /* We must sign or zero-extend in this case. Start by
1323 zero-extending, then sign extend if we need to. */
1324 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1325 if (! unsignedp
1326 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1327 val |= (HOST_WIDE_INT) (-1) << width;
1329 return GEN_INT (val);
1332 return gen_lowpart (mode, x);
1335 temp = gen_reg_rtx (mode);
1336 convert_move (temp, x, unsignedp);
1337 return temp;
1341 /* This macro is used to determine what the largest unit size that
1342 move_by_pieces can use is. */
1344 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1345 move efficiently, as opposed to MOVE_MAX which is the maximum
1346 number of bhytes we can move with a single instruction. */
1348 #ifndef MOVE_MAX_PIECES
1349 #define MOVE_MAX_PIECES MOVE_MAX
1350 #endif
1352 /* Generate several move instructions to copy LEN bytes
1353 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1354 The caller must pass FROM and TO
1355 through protect_from_queue before calling.
1356 ALIGN (in bytes) is maximum alignment we can assume. */
1358 void
1359 move_by_pieces (to, from, len, align)
1360 rtx to, from;
1361 int len, align;
1363 struct move_by_pieces data;
1364 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1365 int max_size = MOVE_MAX_PIECES + 1;
1366 enum machine_mode mode = VOIDmode, tmode;
1367 enum insn_code icode;
1369 data.offset = 0;
1370 data.to_addr = to_addr;
1371 data.from_addr = from_addr;
1372 data.to = to;
1373 data.from = from;
1374 data.autinc_to
1375 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1376 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1377 data.autinc_from
1378 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1379 || GET_CODE (from_addr) == POST_INC
1380 || GET_CODE (from_addr) == POST_DEC);
1382 data.explicit_inc_from = 0;
1383 data.explicit_inc_to = 0;
1384 data.reverse
1385 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1386 if (data.reverse) data.offset = len;
1387 data.len = len;
1389 data.to_struct = MEM_IN_STRUCT_P (to);
1390 data.from_struct = MEM_IN_STRUCT_P (from);
1391 data.to_readonly = RTX_UNCHANGING_P (to);
1392 data.from_readonly = RTX_UNCHANGING_P (from);
1394 /* If copying requires more than two move insns,
1395 copy addresses to registers (to make displacements shorter)
1396 and use post-increment if available. */
1397 if (!(data.autinc_from && data.autinc_to)
1398 && move_by_pieces_ninsns (len, align) > 2)
1400 /* Find the mode of the largest move... */
1401 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1402 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1403 if (GET_MODE_SIZE (tmode) < max_size)
1404 mode = tmode;
1406 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1408 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1409 data.autinc_from = 1;
1410 data.explicit_inc_from = -1;
1412 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1414 data.from_addr = copy_addr_to_reg (from_addr);
1415 data.autinc_from = 1;
1416 data.explicit_inc_from = 1;
1418 if (!data.autinc_from && CONSTANT_P (from_addr))
1419 data.from_addr = copy_addr_to_reg (from_addr);
1420 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1422 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1423 data.autinc_to = 1;
1424 data.explicit_inc_to = -1;
1426 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1428 data.to_addr = copy_addr_to_reg (to_addr);
1429 data.autinc_to = 1;
1430 data.explicit_inc_to = 1;
1432 if (!data.autinc_to && CONSTANT_P (to_addr))
1433 data.to_addr = copy_addr_to_reg (to_addr);
1436 if (! SLOW_UNALIGNED_ACCESS
1437 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1438 align = MOVE_MAX;
1440 /* First move what we can in the largest integer mode, then go to
1441 successively smaller modes. */
1443 while (max_size > 1)
1445 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1446 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1447 if (GET_MODE_SIZE (tmode) < max_size)
1448 mode = tmode;
1450 if (mode == VOIDmode)
1451 break;
1453 icode = mov_optab->handlers[(int) mode].insn_code;
1454 if (icode != CODE_FOR_nothing
1455 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1456 GET_MODE_SIZE (mode)))
1457 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1459 max_size = GET_MODE_SIZE (mode);
1462 /* The code above should have handled everything. */
1463 if (data.len > 0)
1464 abort ();
1467 /* Return number of insns required to move L bytes by pieces.
1468 ALIGN (in bytes) is maximum alignment we can assume. */
1470 static int
1471 move_by_pieces_ninsns (l, align)
1472 unsigned int l;
1473 int align;
1475 register int n_insns = 0;
1476 int max_size = MOVE_MAX + 1;
1478 if (! SLOW_UNALIGNED_ACCESS
1479 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1480 align = MOVE_MAX;
1482 while (max_size > 1)
1484 enum machine_mode mode = VOIDmode, tmode;
1485 enum insn_code icode;
1487 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1488 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1489 if (GET_MODE_SIZE (tmode) < max_size)
1490 mode = tmode;
1492 if (mode == VOIDmode)
1493 break;
1495 icode = mov_optab->handlers[(int) mode].insn_code;
1496 if (icode != CODE_FOR_nothing
1497 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1498 GET_MODE_SIZE (mode)))
1499 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1501 max_size = GET_MODE_SIZE (mode);
1504 return n_insns;
1507 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1508 with move instructions for mode MODE. GENFUN is the gen_... function
1509 to make a move insn for that mode. DATA has all the other info. */
1511 static void
1512 move_by_pieces_1 (genfun, mode, data)
1513 rtx (*genfun) PROTO ((rtx, ...));
1514 enum machine_mode mode;
1515 struct move_by_pieces *data;
1517 register int size = GET_MODE_SIZE (mode);
1518 register rtx to1, from1;
1520 while (data->len >= size)
1522 if (data->reverse) data->offset -= size;
1524 to1 = (data->autinc_to
1525 ? gen_rtx_MEM (mode, data->to_addr)
1526 : copy_rtx (change_address (data->to, mode,
1527 plus_constant (data->to_addr,
1528 data->offset))));
1529 MEM_IN_STRUCT_P (to1) = data->to_struct;
1530 RTX_UNCHANGING_P (to1) = data->to_readonly;
1532 from1
1533 = (data->autinc_from
1534 ? gen_rtx_MEM (mode, data->from_addr)
1535 : copy_rtx (change_address (data->from, mode,
1536 plus_constant (data->from_addr,
1537 data->offset))));
1538 MEM_IN_STRUCT_P (from1) = data->from_struct;
1539 RTX_UNCHANGING_P (from1) = data->from_readonly;
1541 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1542 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1543 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1544 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1546 emit_insn ((*genfun) (to1, from1));
1547 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1548 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1549 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1550 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1552 if (! data->reverse) data->offset += size;
1554 data->len -= size;
1558 /* Emit code to move a block Y to a block X.
1559 This may be done with string-move instructions,
1560 with multiple scalar move instructions, or with a library call.
1562 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1563 with mode BLKmode.
1564 SIZE is an rtx that says how long they are.
1565 ALIGN is the maximum alignment we can assume they have,
1566 measured in bytes.
1568 Return the address of the new block, if memcpy is called and returns it,
1569 0 otherwise. */
1572 emit_block_move (x, y, size, align)
1573 rtx x, y;
1574 rtx size;
1575 int align;
1577 rtx retval = 0;
1578 #ifdef TARGET_MEM_FUNCTIONS
1579 static tree fn;
1580 tree call_expr, arg_list;
1581 #endif
1583 if (GET_MODE (x) != BLKmode)
1584 abort ();
1586 if (GET_MODE (y) != BLKmode)
1587 abort ();
1589 x = protect_from_queue (x, 1);
1590 y = protect_from_queue (y, 0);
1591 size = protect_from_queue (size, 0);
1593 if (GET_CODE (x) != MEM)
1594 abort ();
1595 if (GET_CODE (y) != MEM)
1596 abort ();
1597 if (size == 0)
1598 abort ();
1600 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1601 move_by_pieces (x, y, INTVAL (size), align);
1602 else
1604 /* Try the most limited insn first, because there's no point
1605 including more than one in the machine description unless
1606 the more limited one has some advantage. */
1608 rtx opalign = GEN_INT (align);
1609 enum machine_mode mode;
1611 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1612 mode = GET_MODE_WIDER_MODE (mode))
1614 enum insn_code code = movstr_optab[(int) mode];
1615 insn_operand_predicate_fn pred;
1617 if (code != CODE_FOR_nothing
1618 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1619 here because if SIZE is less than the mode mask, as it is
1620 returned by the macro, it will definitely be less than the
1621 actual mode mask. */
1622 && ((GET_CODE (size) == CONST_INT
1623 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1624 <= (GET_MODE_MASK (mode) >> 1)))
1625 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1626 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1627 || (*pred) (x, BLKmode))
1628 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1629 || (*pred) (y, BLKmode))
1630 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1631 || (*pred) (opalign, VOIDmode)))
1633 rtx op2;
1634 rtx last = get_last_insn ();
1635 rtx pat;
1637 op2 = convert_to_mode (mode, size, 1);
1638 pred = insn_data[(int) code].operand[2].predicate;
1639 if (pred != 0 && ! (*pred) (op2, mode))
1640 op2 = copy_to_mode_reg (mode, op2);
1642 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1643 if (pat)
1645 emit_insn (pat);
1646 return 0;
1648 else
1649 delete_insns_since (last);
1653 /* X, Y, or SIZE may have been passed through protect_from_queue.
1655 It is unsafe to save the value generated by protect_from_queue
1656 and reuse it later. Consider what happens if emit_queue is
1657 called before the return value from protect_from_queue is used.
1659 Expansion of the CALL_EXPR below will call emit_queue before
1660 we are finished emitting RTL for argument setup. So if we are
1661 not careful we could get the wrong value for an argument.
1663 To avoid this problem we go ahead and emit code to copy X, Y &
1664 SIZE into new pseudos. We can then place those new pseudos
1665 into an RTL_EXPR and use them later, even after a call to
1666 emit_queue.
1668 Note this is not strictly needed for library calls since they
1669 do not call emit_queue before loading their arguments. However,
1670 we may need to have library calls call emit_queue in the future
1671 since failing to do so could cause problems for targets which
1672 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1673 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1674 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1676 #ifdef TARGET_MEM_FUNCTIONS
1677 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1678 #else
1679 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1680 TREE_UNSIGNED (integer_type_node));
1681 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1682 #endif
1684 #ifdef TARGET_MEM_FUNCTIONS
1685 /* It is incorrect to use the libcall calling conventions to call
1686 memcpy in this context.
1688 This could be a user call to memcpy and the user may wish to
1689 examine the return value from memcpy.
1691 For targets where libcalls and normal calls have different conventions
1692 for returning pointers, we could end up generating incorrect code.
1694 So instead of using a libcall sequence we build up a suitable
1695 CALL_EXPR and expand the call in the normal fashion. */
1696 if (fn == NULL_TREE)
1698 tree fntype;
1700 /* This was copied from except.c, I don't know if all this is
1701 necessary in this context or not. */
1702 fn = get_identifier ("memcpy");
1703 push_obstacks_nochange ();
1704 end_temporary_allocation ();
1705 fntype = build_pointer_type (void_type_node);
1706 fntype = build_function_type (fntype, NULL_TREE);
1707 fn = build_decl (FUNCTION_DECL, fn, fntype);
1708 ggc_add_tree_root (&fn, 1);
1709 DECL_EXTERNAL (fn) = 1;
1710 TREE_PUBLIC (fn) = 1;
1711 DECL_ARTIFICIAL (fn) = 1;
1712 make_decl_rtl (fn, NULL_PTR, 1);
1713 assemble_external (fn);
1714 pop_obstacks ();
1717 /* We need to make an argument list for the function call.
1719 memcpy has three arguments, the first two are void * addresses and
1720 the last is a size_t byte count for the copy. */
1721 arg_list
1722 = build_tree_list (NULL_TREE,
1723 make_tree (build_pointer_type (void_type_node), x));
1724 TREE_CHAIN (arg_list)
1725 = build_tree_list (NULL_TREE,
1726 make_tree (build_pointer_type (void_type_node), y));
1727 TREE_CHAIN (TREE_CHAIN (arg_list))
1728 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1729 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1731 /* Now we have to build up the CALL_EXPR itself. */
1732 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1733 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1734 call_expr, arg_list, NULL_TREE);
1735 TREE_SIDE_EFFECTS (call_expr) = 1;
1737 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1738 #else
1739 emit_library_call (bcopy_libfunc, 0,
1740 VOIDmode, 3, y, Pmode, x, Pmode,
1741 convert_to_mode (TYPE_MODE (integer_type_node), size,
1742 TREE_UNSIGNED (integer_type_node)),
1743 TYPE_MODE (integer_type_node));
1744 #endif
1747 return retval;
1750 /* Copy all or part of a value X into registers starting at REGNO.
1751 The number of registers to be filled is NREGS. */
1753 void
1754 move_block_to_reg (regno, x, nregs, mode)
1755 int regno;
1756 rtx x;
1757 int nregs;
1758 enum machine_mode mode;
1760 int i;
1761 #ifdef HAVE_load_multiple
1762 rtx pat;
1763 rtx last;
1764 #endif
1766 if (nregs == 0)
1767 return;
1769 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1770 x = validize_mem (force_const_mem (mode, x));
1772 /* See if the machine can do this with a load multiple insn. */
1773 #ifdef HAVE_load_multiple
1774 if (HAVE_load_multiple)
1776 last = get_last_insn ();
1777 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1778 GEN_INT (nregs));
1779 if (pat)
1781 emit_insn (pat);
1782 return;
1784 else
1785 delete_insns_since (last);
1787 #endif
1789 for (i = 0; i < nregs; i++)
1790 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1791 operand_subword_force (x, i, mode));
1794 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1795 The number of registers to be filled is NREGS. SIZE indicates the number
1796 of bytes in the object X. */
1799 void
1800 move_block_from_reg (regno, x, nregs, size)
1801 int regno;
1802 rtx x;
1803 int nregs;
1804 int size;
1806 int i;
1807 #ifdef HAVE_store_multiple
1808 rtx pat;
1809 rtx last;
1810 #endif
1811 enum machine_mode mode;
1813 /* If SIZE is that of a mode no bigger than a word, just use that
1814 mode's store operation. */
1815 if (size <= UNITS_PER_WORD
1816 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1818 emit_move_insn (change_address (x, mode, NULL),
1819 gen_rtx_REG (mode, regno));
1820 return;
1823 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1824 to the left before storing to memory. Note that the previous test
1825 doesn't handle all cases (e.g. SIZE == 3). */
1826 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1828 rtx tem = operand_subword (x, 0, 1, BLKmode);
1829 rtx shift;
1831 if (tem == 0)
1832 abort ();
1834 shift = expand_shift (LSHIFT_EXPR, word_mode,
1835 gen_rtx_REG (word_mode, regno),
1836 build_int_2 ((UNITS_PER_WORD - size)
1837 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1838 emit_move_insn (tem, shift);
1839 return;
1842 /* See if the machine can do this with a store multiple insn. */
1843 #ifdef HAVE_store_multiple
1844 if (HAVE_store_multiple)
1846 last = get_last_insn ();
1847 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1848 GEN_INT (nregs));
1849 if (pat)
1851 emit_insn (pat);
1852 return;
1854 else
1855 delete_insns_since (last);
1857 #endif
1859 for (i = 0; i < nregs; i++)
1861 rtx tem = operand_subword (x, i, 1, BLKmode);
1863 if (tem == 0)
1864 abort ();
1866 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1870 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1871 registers represented by a PARALLEL. SSIZE represents the total size of
1872 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1873 SRC in bits. */
1874 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1875 the balance will be in what would be the low-order memory addresses, i.e.
1876 left justified for big endian, right justified for little endian. This
1877 happens to be true for the targets currently using this support. If this
1878 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1879 would be needed. */
1881 void
1882 emit_group_load (dst, orig_src, ssize, align)
1883 rtx dst, orig_src;
1884 int align, ssize;
1886 rtx *tmps, src;
1887 int start, i;
1889 if (GET_CODE (dst) != PARALLEL)
1890 abort ();
1892 /* Check for a NULL entry, used to indicate that the parameter goes
1893 both on the stack and in registers. */
1894 if (XEXP (XVECEXP (dst, 0, 0), 0))
1895 start = 0;
1896 else
1897 start = 1;
1899 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1901 /* If we won't be loading directly from memory, protect the real source
1902 from strange tricks we might play. */
1903 src = orig_src;
1904 if (GET_CODE (src) != MEM)
1906 src = gen_reg_rtx (GET_MODE (orig_src));
1907 emit_move_insn (src, orig_src);
1910 /* Process the pieces. */
1911 for (i = start; i < XVECLEN (dst, 0); i++)
1913 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1914 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1915 int bytelen = GET_MODE_SIZE (mode);
1916 int shift = 0;
1918 /* Handle trailing fragments that run over the size of the struct. */
1919 if (ssize >= 0 && bytepos + bytelen > ssize)
1921 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1922 bytelen = ssize - bytepos;
1923 if (bytelen <= 0)
1924 abort();
1927 /* Optimize the access just a bit. */
1928 if (GET_CODE (src) == MEM
1929 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1930 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1931 && bytelen == GET_MODE_SIZE (mode))
1933 tmps[i] = gen_reg_rtx (mode);
1934 emit_move_insn (tmps[i],
1935 change_address (src, mode,
1936 plus_constant (XEXP (src, 0),
1937 bytepos)));
1939 else if (GET_CODE (src) == CONCAT)
1941 if (bytepos == 0
1942 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1943 tmps[i] = XEXP (src, 0);
1944 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1945 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1946 tmps[i] = XEXP (src, 1);
1947 else
1948 abort ();
1950 else
1952 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1953 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1954 mode, mode, align, ssize);
1957 if (BYTES_BIG_ENDIAN && shift)
1959 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1960 tmps[i], 0, OPTAB_WIDEN);
1963 emit_queue();
1965 /* Copy the extracted pieces into the proper (probable) hard regs. */
1966 for (i = start; i < XVECLEN (dst, 0); i++)
1967 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1970 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1971 registers represented by a PARALLEL. SSIZE represents the total size of
1972 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1974 void
1975 emit_group_store (orig_dst, src, ssize, align)
1976 rtx orig_dst, src;
1977 int ssize, align;
1979 rtx *tmps, dst;
1980 int start, i;
1982 if (GET_CODE (src) != PARALLEL)
1983 abort ();
1985 /* Check for a NULL entry, used to indicate that the parameter goes
1986 both on the stack and in registers. */
1987 if (XEXP (XVECEXP (src, 0, 0), 0))
1988 start = 0;
1989 else
1990 start = 1;
1992 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
1994 /* Copy the (probable) hard regs into pseudos. */
1995 for (i = start; i < XVECLEN (src, 0); i++)
1997 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1998 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1999 emit_move_insn (tmps[i], reg);
2001 emit_queue();
2003 /* If we won't be storing directly into memory, protect the real destination
2004 from strange tricks we might play. */
2005 dst = orig_dst;
2006 if (GET_CODE (dst) == PARALLEL)
2008 rtx temp;
2010 /* We can get a PARALLEL dst if there is a conditional expression in
2011 a return statement. In that case, the dst and src are the same,
2012 so no action is necessary. */
2013 if (rtx_equal_p (dst, src))
2014 return;
2016 /* It is unclear if we can ever reach here, but we may as well handle
2017 it. Allocate a temporary, and split this into a store/load to/from
2018 the temporary. */
2020 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2021 emit_group_store (temp, src, ssize, align);
2022 emit_group_load (dst, temp, ssize, align);
2023 return;
2025 else if (GET_CODE (dst) != MEM)
2027 dst = gen_reg_rtx (GET_MODE (orig_dst));
2028 /* Make life a bit easier for combine. */
2029 emit_move_insn (dst, const0_rtx);
2031 else if (! MEM_IN_STRUCT_P (dst))
2033 /* store_bit_field requires that memory operations have
2034 mem_in_struct_p set; we might not. */
2036 dst = copy_rtx (orig_dst);
2037 MEM_SET_IN_STRUCT_P (dst, 1);
2040 /* Process the pieces. */
2041 for (i = start; i < XVECLEN (src, 0); i++)
2043 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2044 enum machine_mode mode = GET_MODE (tmps[i]);
2045 int bytelen = GET_MODE_SIZE (mode);
2047 /* Handle trailing fragments that run over the size of the struct. */
2048 if (ssize >= 0 && bytepos + bytelen > ssize)
2050 if (BYTES_BIG_ENDIAN)
2052 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2053 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2054 tmps[i], 0, OPTAB_WIDEN);
2056 bytelen = ssize - bytepos;
2059 /* Optimize the access just a bit. */
2060 if (GET_CODE (dst) == MEM
2061 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2062 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2063 && bytelen == GET_MODE_SIZE (mode))
2065 emit_move_insn (change_address (dst, mode,
2066 plus_constant (XEXP (dst, 0),
2067 bytepos)),
2068 tmps[i]);
2070 else
2072 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2073 mode, tmps[i], align, ssize);
2076 emit_queue();
2078 /* Copy from the pseudo into the (probable) hard reg. */
2079 if (GET_CODE (dst) == REG)
2080 emit_move_insn (orig_dst, dst);
2083 /* Generate code to copy a BLKmode object of TYPE out of a
2084 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2085 is null, a stack temporary is created. TGTBLK is returned.
2087 The primary purpose of this routine is to handle functions
2088 that return BLKmode structures in registers. Some machines
2089 (the PA for example) want to return all small structures
2090 in registers regardless of the structure's alignment. */
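/* For instance, assuming a 32-bit BYTES_BIG_ENDIAN target
   (BITS_PER_WORD == 32, UNITS_PER_WORD == 4), a 5-byte structure
   gives bytes % UNITS_PER_WORD == 1, so big_endian_correction below
   becomes 32 - 8 == 24: the first extraction starts 24 bits into the
   source word, skipping the unused high-order bytes, while the store
   into TGTBLK still starts at bit 0.  The numbers are only an
   illustrative example.  */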
2094 copy_blkmode_from_reg (tgtblk, srcreg, type)
2095 rtx tgtblk;
2096 rtx srcreg;
2097 tree type;
2099 int bytes = int_size_in_bytes (type);
2100 rtx src = NULL, dst = NULL;
2101 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2102 int bitpos, xbitpos, big_endian_correction = 0;
2104 if (tgtblk == 0)
2106 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2107 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2108 preserve_temp_slots (tgtblk);
2111 /* This code assumes srcreg is at least a full word. If it isn't,
2112 copy it into a new pseudo which is a full word. */
2113 if (GET_MODE (srcreg) != BLKmode
2114 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2115 srcreg = convert_to_mode (word_mode, srcreg,
2116 TREE_UNSIGNED (type));
2118 /* Structures whose size is not a multiple of a word are aligned
2119 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2120 machine, this means we must skip the empty high order bytes when
2121 calculating the bit offset. */
2122 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2123 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2124 * BITS_PER_UNIT));
2126 /* Copy the structure BITSIZE bits at a time.
2128 We could probably emit more efficient code for machines
2129 which do not use strict alignment, but it doesn't seem
2130 worth the effort at the current time. */
2131 for (bitpos = 0, xbitpos = big_endian_correction;
2132 bitpos < bytes * BITS_PER_UNIT;
2133 bitpos += bitsize, xbitpos += bitsize)
2136 /* We need a new source operand each time xbitpos is on a
2137 word boundary and when xbitpos == big_endian_correction
2138 (the first time through). */
2139 if (xbitpos % BITS_PER_WORD == 0
2140 || xbitpos == big_endian_correction)
2141 src = operand_subword_force (srcreg,
2142 xbitpos / BITS_PER_WORD,
2143 BLKmode);
2145 /* We need a new destination operand each time bitpos is on
2146 a word boundary. */
2147 if (bitpos % BITS_PER_WORD == 0)
2148 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2150 /* Use xbitpos for the source extraction (right justified) and
2151 bitpos for the destination store (left justified). */
2152 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2153 extract_bit_field (src, bitsize,
2154 xbitpos % BITS_PER_WORD, 1,
2155 NULL_RTX, word_mode,
2156 word_mode,
2157 bitsize / BITS_PER_UNIT,
2158 BITS_PER_WORD),
2159 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2161 return tgtblk;
2165 /* Add a USE expression for REG to the (possibly empty) list pointed
2166 to by CALL_FUSAGE. REG must denote a hard register. */
2168 void
2169 use_reg (call_fusage, reg)
2170 rtx *call_fusage, reg;
2172 if (GET_CODE (reg) != REG
2173 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2174 abort();
2176 *call_fusage
2177 = gen_rtx_EXPR_LIST (VOIDmode,
2178 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2181 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2182 starting at REGNO. All of these registers must be hard registers. */
2184 void
2185 use_regs (call_fusage, regno, nregs)
2186 rtx *call_fusage;
2187 int regno;
2188 int nregs;
2190 int i;
2192 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2193 abort ();
2195 for (i = 0; i < nregs; i++)
2196 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2199 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2200 PARALLEL REGS. This is for calls that pass values in multiple
2201 non-contiguous locations. The Irix 6 ABI has examples of this. */
2203 void
2204 use_group_regs (call_fusage, regs)
2205 rtx *call_fusage;
2206 rtx regs;
2208 int i;
2210 for (i = 0; i < XVECLEN (regs, 0); i++)
2212 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2214 /* A NULL entry means the parameter goes both on the stack and in
2215 registers. This can also be a MEM for targets that pass values
2216 partially on the stack and partially in registers. */
2217 if (reg != 0 && GET_CODE (reg) == REG)
2218 use_reg (call_fusage, reg);
2222 /* Generate several move instructions to clear LEN bytes of block TO.
2223 (A MEM rtx with BLKmode). The caller must pass TO through
2224 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2225 we can assume. */
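/* Sketch of the strategy used below, with hypothetical numbers:
   clearing LEN == 7 bytes with ALIGN == 4 on a 32-bit target would
   emit one SImode store of zero, then one HImode store, then one
   QImode store, each pass picking the widest integer mode that still
   fits in the remaining length and whose mov pattern and alignment
   requirements are satisfied.  */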
2227 static void
2228 clear_by_pieces (to, len, align)
2229 rtx to;
2230 int len, align;
2232 struct clear_by_pieces data;
2233 rtx to_addr = XEXP (to, 0);
2234 int max_size = MOVE_MAX_PIECES + 1;
2235 enum machine_mode mode = VOIDmode, tmode;
2236 enum insn_code icode;
2238 data.offset = 0;
2239 data.to_addr = to_addr;
2240 data.to = to;
2241 data.autinc_to
2242 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2243 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2245 data.explicit_inc_to = 0;
2246 data.reverse
2247 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2248 if (data.reverse) data.offset = len;
2249 data.len = len;
2251 data.to_struct = MEM_IN_STRUCT_P (to);
2253 /* If copying requires more than two move insns,
2254 copy addresses to registers (to make displacements shorter)
2255 and use post-increment if available. */
2256 if (!data.autinc_to
2257 && move_by_pieces_ninsns (len, align) > 2)
2259 /* Determine the main mode we'll be using */
2260 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2261 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2262 if (GET_MODE_SIZE (tmode) < max_size)
2263 mode = tmode;
2265 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2267 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2268 data.autinc_to = 1;
2269 data.explicit_inc_to = -1;
2271 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2273 data.to_addr = copy_addr_to_reg (to_addr);
2274 data.autinc_to = 1;
2275 data.explicit_inc_to = 1;
2277 if (!data.autinc_to && CONSTANT_P (to_addr))
2278 data.to_addr = copy_addr_to_reg (to_addr);
2281 if (! SLOW_UNALIGNED_ACCESS
2282 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2283 align = MOVE_MAX;
2285 /* First move what we can in the largest integer mode, then go to
2286 successively smaller modes. */
2288 while (max_size > 1)
2290 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2291 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2292 if (GET_MODE_SIZE (tmode) < max_size)
2293 mode = tmode;
2295 if (mode == VOIDmode)
2296 break;
2298 icode = mov_optab->handlers[(int) mode].insn_code;
2299 if (icode != CODE_FOR_nothing
2300 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2301 GET_MODE_SIZE (mode)))
2302 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2304 max_size = GET_MODE_SIZE (mode);
2307 /* The code above should have handled everything. */
2308 if (data.len != 0)
2309 abort ();
2312 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2313 with move instructions for mode MODE. GENFUN is the gen_... function
2314 to make a move insn for that mode. DATA has all the other info. */
2316 static void
2317 clear_by_pieces_1 (genfun, mode, data)
2318 rtx (*genfun) PROTO ((rtx, ...));
2319 enum machine_mode mode;
2320 struct clear_by_pieces *data;
2322 register int size = GET_MODE_SIZE (mode);
2323 register rtx to1;
2325 while (data->len >= size)
2327 if (data->reverse) data->offset -= size;
2329 to1 = (data->autinc_to
2330 ? gen_rtx_MEM (mode, data->to_addr)
2331 : copy_rtx (change_address (data->to, mode,
2332 plus_constant (data->to_addr,
2333 data->offset))));
2334 MEM_IN_STRUCT_P (to1) = data->to_struct;
2336 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2337 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2339 emit_insn ((*genfun) (to1, const0_rtx));
2340 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2341 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2343 if (! data->reverse) data->offset += size;
2345 data->len -= size;
2349 /* Write zeros through the storage of OBJECT.
2350 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2351 the maximum alignment we can assume it has, measured in bytes.
2353 If we call a function that returns the length of the block, return it. */
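/* In outline (see the code below): a BLKmode OBJECT whose SIZE is a
   suitably small CONST_INT is cleared inline with clear_by_pieces;
   otherwise a clrstr insn pattern is tried if the target provides
   one; failing that, a call to memset (or to bzero when
   TARGET_MEM_FUNCTIONS is not defined) is emitted, and its return
   value, if any, is what this function returns.  A non-BLKmode
   OBJECT is simply set to CONST0_RTX of its mode.  */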
2356 clear_storage (object, size, align)
2357 rtx object;
2358 rtx size;
2359 int align;
2361 #ifdef TARGET_MEM_FUNCTIONS
2362 static tree fn;
2363 tree call_expr, arg_list;
2364 #endif
2365 rtx retval = 0;
2367 if (GET_MODE (object) == BLKmode)
2369 object = protect_from_queue (object, 1);
2370 size = protect_from_queue (size, 0);
2372 if (GET_CODE (size) == CONST_INT
2373 && MOVE_BY_PIECES_P (INTVAL (size), align))
2374 clear_by_pieces (object, INTVAL (size), align);
2376 else
2378 /* Try the most limited insn first, because there's no point
2379 including more than one in the machine description unless
2380 the more limited one has some advantage. */
2382 rtx opalign = GEN_INT (align);
2383 enum machine_mode mode;
2385 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2386 mode = GET_MODE_WIDER_MODE (mode))
2388 enum insn_code code = clrstr_optab[(int) mode];
2389 insn_operand_predicate_fn pred;
2391 if (code != CODE_FOR_nothing
2392 /* We don't need MODE to be narrower than
2393 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2394 the mode mask, as it is returned by the macro, it will
2395 definitely be less than the actual mode mask. */
2396 && ((GET_CODE (size) == CONST_INT
2397 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2398 <= (GET_MODE_MASK (mode) >> 1)))
2399 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2400 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2401 || (*pred) (object, BLKmode))
2402 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2403 || (*pred) (opalign, VOIDmode)))
2405 rtx op1;
2406 rtx last = get_last_insn ();
2407 rtx pat;
2409 op1 = convert_to_mode (mode, size, 1);
2410 pred = insn_data[(int) code].operand[1].predicate;
2411 if (pred != 0 && ! (*pred) (op1, mode))
2412 op1 = copy_to_mode_reg (mode, op1);
2414 pat = GEN_FCN ((int) code) (object, op1, opalign);
2415 if (pat)
2417 emit_insn (pat);
2418 return 0;
2420 else
2421 delete_insns_since (last);
2425 /* OBJECT or SIZE may have been passed through protect_from_queue.
2427 It is unsafe to save the value generated by protect_from_queue
2428 and reuse it later. Consider what happens if emit_queue is
2429 called before the return value from protect_from_queue is used.
2431 Expansion of the CALL_EXPR below will call emit_queue before
2432 we are finished emitting RTL for argument setup. So if we are
2433 not careful we could get the wrong value for an argument.
2435 To avoid this problem we go ahead and emit code to copy OBJECT
2436 and SIZE into new pseudos. We can then place those new pseudos
2437 into an RTL_EXPR and use them later, even after a call to
2438 emit_queue.
2440 Note this is not strictly needed for library calls since they
2441 do not call emit_queue before loading their arguments. However,
2442 we may need to have library calls call emit_queue in the future
2443 since failing to do so could cause problems for targets which
2444 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2445 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2447 #ifdef TARGET_MEM_FUNCTIONS
2448 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2449 #else
2450 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2451 TREE_UNSIGNED (integer_type_node));
2452 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2453 #endif
2456 #ifdef TARGET_MEM_FUNCTIONS
2457 /* It is incorrect to use the libcall calling conventions to call
2458 memset in this context.
2460 This could be a user call to memset and the user may wish to
2461 examine the return value from memset.
2463 For targets where libcalls and normal calls have different
2464 conventions for returning pointers, we could end up generating
2465 incorrect code.
2467 So instead of using a libcall sequence we build up a suitable
2468 CALL_EXPR and expand the call in the normal fashion. */
2469 if (fn == NULL_TREE)
2471 tree fntype;
2473 /* This was copied from except.c, I don't know if all this is
2474 necessary in this context or not. */
2475 fn = get_identifier ("memset");
2476 push_obstacks_nochange ();
2477 end_temporary_allocation ();
2478 fntype = build_pointer_type (void_type_node);
2479 fntype = build_function_type (fntype, NULL_TREE);
2480 fn = build_decl (FUNCTION_DECL, fn, fntype);
2481 ggc_add_tree_root (&fn, 1);
2482 DECL_EXTERNAL (fn) = 1;
2483 TREE_PUBLIC (fn) = 1;
2484 DECL_ARTIFICIAL (fn) = 1;
2485 make_decl_rtl (fn, NULL_PTR, 1);
2486 assemble_external (fn);
2487 pop_obstacks ();
2490 /* We need to make an argument list for the function call.
2492 memset has three arguments: the first is a void * address, the
2493 second an integer with the initialization value, and the last is a
2494 size_t byte count. */
2495 arg_list
2496 = build_tree_list (NULL_TREE,
2497 make_tree (build_pointer_type (void_type_node),
2498 object));
2499 TREE_CHAIN (arg_list)
2500 = build_tree_list (NULL_TREE,
2501 make_tree (integer_type_node, const0_rtx));
2502 TREE_CHAIN (TREE_CHAIN (arg_list))
2503 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2504 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2506 /* Now we have to build up the CALL_EXPR itself. */
2507 call_expr = build1 (ADDR_EXPR,
2508 build_pointer_type (TREE_TYPE (fn)), fn);
2509 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2510 call_expr, arg_list, NULL_TREE);
2511 TREE_SIDE_EFFECTS (call_expr) = 1;
2513 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2514 #else
2515 emit_library_call (bzero_libfunc, 0,
2516 VOIDmode, 2, object, Pmode, size,
2517 TYPE_MODE (integer_type_node));
2518 #endif
2521 else
2522 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2524 return retval;
2527 /* Generate code to copy Y into X.
2528 Both Y and X must have the same mode, except that
2529 Y can be a constant with VOIDmode.
2530 This mode cannot be BLKmode; use emit_block_move for that.
2532 Return the last instruction emitted. */
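/* Before handing off to emit_move_insn_1, the code below forces
   non-legitimate constants into memory (except CONSTANT_P_RTX) and
   revalidates the addresses of any MEM operands, so the low-level
   routine can assume X and Y are basically valid.  */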
2535 emit_move_insn (x, y)
2536 rtx x, y;
2538 enum machine_mode mode = GET_MODE (x);
2540 x = protect_from_queue (x, 1);
2541 y = protect_from_queue (y, 0);
2543 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2544 abort ();
2546 /* Never force constant_p_rtx to memory. */
2547 if (GET_CODE (y) == CONSTANT_P_RTX)
2548 ;
2549 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2550 y = force_const_mem (mode, y);
2552 /* If X or Y are memory references, verify that their addresses are valid
2553 for the machine. */
2554 if (GET_CODE (x) == MEM
2555 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2556 && ! push_operand (x, GET_MODE (x)))
2557 || (flag_force_addr
2558 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2559 x = change_address (x, VOIDmode, XEXP (x, 0));
2561 if (GET_CODE (y) == MEM
2562 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2563 || (flag_force_addr
2564 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2565 y = change_address (y, VOIDmode, XEXP (y, 0));
2567 if (mode == BLKmode)
2568 abort ();
2570 return emit_move_insn_1 (x, y);
2573 /* Low level part of emit_move_insn.
2574 Called just like emit_move_insn, but assumes X and Y
2575 are basically valid. */
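/* Roughly, the cases handled below are: use the mov pattern for MODE
   directly when one exists; for complex modes with no such pattern,
   move the real and imaginary parts separately in SUBMODE (pushing
   the high part first for a stack push); and for any other multiword
   mode, move the value one word at a time with operand_subword,
   forcing constants to memory when a word of them cannot be taken
   directly.  Anything else aborts.  */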
2578 emit_move_insn_1 (x, y)
2579 rtx x, y;
2581 enum machine_mode mode = GET_MODE (x);
2582 enum machine_mode submode;
2583 enum mode_class class = GET_MODE_CLASS (mode);
2584 int i;
2586 if (mode >= MAX_MACHINE_MODE)
2587 abort ();
2589 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2590 return
2591 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2593 /* Expand complex moves by moving real part and imag part, if possible. */
2594 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2595 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2596 * BITS_PER_UNIT),
2597 (class == MODE_COMPLEX_INT
2598 ? MODE_INT : MODE_FLOAT),
2600 && (mov_optab->handlers[(int) submode].insn_code
2601 != CODE_FOR_nothing))
2603 /* Don't split destination if it is a stack push. */
2604 int stack = push_operand (x, GET_MODE (x));
2606 /* If this is a stack, push the highpart first, so it
2607 will be in the argument order.
2609 In that case, change_address is used only to convert
2610 the mode, not to change the address. */
2611 if (stack)
2613 /* Note that the real part always precedes the imag part in memory
2614 regardless of machine's endianness. */
2615 #ifdef STACK_GROWS_DOWNWARD
2616 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2617 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2618 gen_imagpart (submode, y)));
2619 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2620 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2621 gen_realpart (submode, y)));
2622 #else
2623 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2624 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2625 gen_realpart (submode, y)));
2626 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2627 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2628 gen_imagpart (submode, y)));
2629 #endif
2631 else
2633 /* If this is a complex value with each part being smaller than a
2634 word, the usual calling sequence will likely pack the pieces into
2635 a single register. Unfortunately, SUBREG of hard registers only
2636 deals in terms of words, so we have a problem converting input
2637 arguments to the CONCAT of two registers that is used elsewhere
2638 for complex values. If this is before reload, we can copy it into
2639 memory and reload. FIXME, we should see about using extract and
2640 insert on integer registers, but complex short and complex char
2641 variables should be rarely used. */
2642 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2643 && (reload_in_progress | reload_completed) == 0)
2645 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2646 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2648 if (packed_dest_p || packed_src_p)
2650 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2651 ? MODE_FLOAT : MODE_INT);
2653 enum machine_mode reg_mode =
2654 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2656 if (reg_mode != BLKmode)
2658 rtx mem = assign_stack_temp (reg_mode,
2659 GET_MODE_SIZE (mode), 0);
2661 rtx cmem = change_address (mem, mode, NULL_RTX);
2663 current_function->cannot_inline
2664 = "function uses short complex types";
2666 if (packed_dest_p)
2668 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2669 emit_move_insn_1 (cmem, y);
2670 return emit_move_insn_1 (sreg, mem);
2672 else
2674 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2675 emit_move_insn_1 (mem, sreg);
2676 return emit_move_insn_1 (x, cmem);
2682 /* Show the output dies here. This is necessary for pseudos;
2683 hard regs shouldn't appear here except as return values.
2684 We never want to emit such a clobber after reload. */
2685 if (x != y
2686 && ! (reload_in_progress || reload_completed))
2688 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2691 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2692 (gen_realpart (submode, x), gen_realpart (submode, y)));
2693 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2694 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2697 return get_last_insn ();
2700 /* This will handle any multi-word mode that lacks a move_insn pattern.
2701 However, you will get better code if you define such patterns,
2702 even if they must turn into multiple assembler instructions. */
2703 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2705 rtx last_insn = 0;
2707 #ifdef PUSH_ROUNDING
2709 /* If X is a push on the stack, do the push now and replace
2710 X with a reference to the stack pointer. */
2711 if (push_operand (x, GET_MODE (x)))
2713 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2714 x = change_address (x, VOIDmode, stack_pointer_rtx);
2716 #endif
2718 /* Show the output dies here. This is necessary for pseudos;
2719 hard regs shouldn't appear here except as return values.
2720 We never want to emit such a clobber after reload. */
2721 if (x != y
2722 && ! (reload_in_progress || reload_completed))
2724 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2727 for (i = 0;
2728 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2729 i++)
2731 rtx xpart = operand_subword (x, i, 1, mode);
2732 rtx ypart = operand_subword (y, i, 1, mode);
2734 /* If we can't get a part of Y, put Y into memory if it is a
2735 constant. Otherwise, force it into a register. If we still
2736 can't get a part of Y, abort. */
2737 if (ypart == 0 && CONSTANT_P (y))
2739 y = force_const_mem (mode, y);
2740 ypart = operand_subword (y, i, 1, mode);
2742 else if (ypart == 0)
2743 ypart = operand_subword_force (y, i, mode);
2745 if (xpart == 0 || ypart == 0)
2746 abort ();
2748 last_insn = emit_move_insn (xpart, ypart);
2751 return last_insn;
2753 else
2754 abort ();
2757 /* Pushing data onto the stack. */
2759 /* Push a block of length SIZE (perhaps variable)
2760 and return an rtx to address the beginning of the block.
2761 Note that it is not possible for the value returned to be a QUEUED.
2762 The value may be virtual_outgoing_args_rtx.
2764 EXTRA is the number of bytes of padding to push in addition to SIZE.
2765 BELOW nonzero means this padding comes at low addresses;
2766 otherwise, the padding comes at high addresses. */
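/* A hypothetical example: SIZE == GEN_INT (16) with EXTRA == 4 makes
   anti_adjust_stack reserve 20 bytes.  The address then returned is
   computed from virtual_outgoing_args_rtx and is offset past the
   EXTRA padding bytes or not depending on BELOW, so that the caller
   always gets the address of the 16-byte block itself, with the
   padding on whichever side BELOW selects.  */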
2769 push_block (size, extra, below)
2770 rtx size;
2771 int extra, below;
2773 register rtx temp;
2775 size = convert_modes (Pmode, ptr_mode, size, 1);
2776 if (CONSTANT_P (size))
2777 anti_adjust_stack (plus_constant (size, extra));
2778 else if (GET_CODE (size) == REG && extra == 0)
2779 anti_adjust_stack (size);
2780 else
2782 rtx temp = copy_to_mode_reg (Pmode, size);
2783 if (extra != 0)
2784 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2785 temp, 0, OPTAB_LIB_WIDEN);
2786 anti_adjust_stack (temp);
2789 #if defined (STACK_GROWS_DOWNWARD) \
2790 || (defined (ARGS_GROW_DOWNWARD) \
2791 && !defined (ACCUMULATE_OUTGOING_ARGS))
2793 /* Return the lowest stack address when STACK or ARGS grow downward and
2794 we are not accumulating outgoing arguments (the c4x port uses such
2795 conventions). */
2796 temp = virtual_outgoing_args_rtx;
2797 if (extra != 0 && below)
2798 temp = plus_constant (temp, extra);
2799 #else
2800 if (GET_CODE (size) == CONST_INT)
2801 temp = plus_constant (virtual_outgoing_args_rtx,
2802 - INTVAL (size) - (below ? 0 : extra));
2803 else if (extra != 0 && !below)
2804 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2805 negate_rtx (Pmode, plus_constant (size, extra)));
2806 else
2807 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2808 negate_rtx (Pmode, size));
2809 #endif
2811 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2815 gen_push_operand ()
2817 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2820 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2821 block of SIZE bytes. */
2823 static rtx
2824 get_push_address (size)
2825 int size;
2827 register rtx temp;
2829 if (STACK_PUSH_CODE == POST_DEC)
2830 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2831 else if (STACK_PUSH_CODE == POST_INC)
2832 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2833 else
2834 temp = stack_pointer_rtx;
2836 return copy_to_reg (temp);
2839 /* Generate code to push X onto the stack, assuming it has mode MODE and
2840 type TYPE.
2841 MODE is redundant except when X is a CONST_INT (since they don't
2842 carry mode info).
2843 SIZE is an rtx for the size of data to be copied (in bytes),
2844 needed only if X is BLKmode.
2846 ALIGN (in bytes) is maximum alignment we can assume.
2848 If PARTIAL and REG are both nonzero, then copy that many of the first
2849 words of X into registers starting with REG, and push the rest of X.
2850 The amount of space pushed is decreased by PARTIAL words,
2851 rounded *down* to a multiple of PARM_BOUNDARY.
2852 REG must be a hard register in this case.
2853 If REG is zero but PARTIAL is not, take all other actions for an
2854 argument partially in registers, but do not actually load any
2855 registers.
2857 EXTRA is the amount in bytes of extra space to leave next to this arg.
2858 This is ignored if an argument block has already been allocated.
2860 On a machine that lacks real push insns, ARGS_ADDR is the address of
2861 the bottom of the argument block for this call. We use indexing off there
2862 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2863 argument block has not been preallocated.
2865 ARGS_SO_FAR is the size of args previously pushed for this call.
2867 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2868 for arguments passed in registers. If nonzero, it will be the number
2869 of bytes required. */
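/* Broadly, three cases are handled below: a BLKmode value is pushed
   either with move_by_pieces through a push operand, with a target
   movstr pattern, or by a library memcpy/bcopy into space obtained
   from push_block or the preallocated argument block; a scalar that
   is partly in registers is pushed one word at a time with recursive
   calls on operand_subword_force; and an ordinary scalar is stored
   with a single emit_move_insn to a push operand or to the argument
   block.  Register parts, if any, are loaded at the very end.  */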
2871 void
2872 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2873 args_addr, args_so_far, reg_parm_stack_space)
2874 register rtx x;
2875 enum machine_mode mode;
2876 tree type;
2877 rtx size;
2878 int align;
2879 int partial;
2880 rtx reg;
2881 int extra;
2882 rtx args_addr;
2883 rtx args_so_far;
2884 int reg_parm_stack_space;
2886 rtx xinner;
2887 enum direction stack_direction
2888 #ifdef STACK_GROWS_DOWNWARD
2889 = downward;
2890 #else
2891 = upward;
2892 #endif
2894 /* Decide where to pad the argument: `downward' for below,
2895 `upward' for above, or `none' for don't pad it.
2896 Default is below for small data on big-endian machines; else above. */
2897 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2899 /* Invert direction if stack is post-update. */
2900 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2901 if (where_pad != none)
2902 where_pad = (where_pad == downward ? upward : downward);
2904 xinner = x = protect_from_queue (x, 0);
2906 if (mode == BLKmode)
2908 /* Copy a block into the stack, entirely or partially. */
2910 register rtx temp;
2911 int used = partial * UNITS_PER_WORD;
2912 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2913 int skip;
2915 if (size == 0)
2916 abort ();
2918 used -= offset;
2920 /* USED is now the # of bytes we need not copy to the stack
2921 because registers will take care of them. */
2923 if (partial != 0)
2924 xinner = change_address (xinner, BLKmode,
2925 plus_constant (XEXP (xinner, 0), used));
2927 /* If the partial register-part of the arg counts in its stack size,
2928 skip the part of stack space corresponding to the registers.
2929 Otherwise, start copying to the beginning of the stack space,
2930 by setting SKIP to 0. */
2931 skip = (reg_parm_stack_space == 0) ? 0 : used;
2933 #ifdef PUSH_ROUNDING
2934 /* Do it with several push insns if that doesn't take lots of insns
2935 and if there is no difficulty with push insns that skip bytes
2936 on the stack for alignment purposes. */
2937 if (args_addr == 0
2938 && GET_CODE (size) == CONST_INT
2939 && skip == 0
2940 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2941 /* Here we avoid the case of a structure whose weak alignment
2942 forces many pushes of a small amount of data,
2943 and such small pushes do rounding that causes trouble. */
2944 && ((! SLOW_UNALIGNED_ACCESS)
2945 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2946 || PUSH_ROUNDING (align) == align)
2947 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2949 /* Push padding now if padding above and stack grows down,
2950 or if padding below and stack grows up.
2951 But if space already allocated, this has already been done. */
2952 if (extra && args_addr == 0
2953 && where_pad != none && where_pad != stack_direction)
2954 anti_adjust_stack (GEN_INT (extra));
2956 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2957 INTVAL (size) - used, align);
2959 if (current_function_check_memory_usage && ! in_check_memory_usage)
2961 rtx temp;
2963 in_check_memory_usage = 1;
2964 temp = get_push_address (INTVAL(size) - used);
2965 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2966 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2967 temp, Pmode,
2968 XEXP (xinner, 0), Pmode,
2969 GEN_INT (INTVAL(size) - used),
2970 TYPE_MODE (sizetype));
2971 else
2972 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2973 temp, Pmode,
2974 GEN_INT (INTVAL(size) - used),
2975 TYPE_MODE (sizetype),
2976 GEN_INT (MEMORY_USE_RW),
2977 TYPE_MODE (integer_type_node));
2978 in_check_memory_usage = 0;
2981 else
2982 #endif /* PUSH_ROUNDING */
2984 /* Otherwise make space on the stack and copy the data
2985 to the address of that space. */
2987 /* Deduct words put into registers from the size we must copy. */
2988 if (partial != 0)
2990 if (GET_CODE (size) == CONST_INT)
2991 size = GEN_INT (INTVAL (size) - used);
2992 else
2993 size = expand_binop (GET_MODE (size), sub_optab, size,
2994 GEN_INT (used), NULL_RTX, 0,
2995 OPTAB_LIB_WIDEN);
2998 /* Get the address of the stack space.
2999 In this case, we do not deal with EXTRA separately.
3000 A single stack adjust will do. */
3001 if (! args_addr)
3003 temp = push_block (size, extra, where_pad == downward);
3004 extra = 0;
3006 else if (GET_CODE (args_so_far) == CONST_INT)
3007 temp = memory_address (BLKmode,
3008 plus_constant (args_addr,
3009 skip + INTVAL (args_so_far)));
3010 else
3011 temp = memory_address (BLKmode,
3012 plus_constant (gen_rtx_PLUS (Pmode,
3013 args_addr,
3014 args_so_far),
3015 skip));
3016 if (current_function_check_memory_usage && ! in_check_memory_usage)
3018 rtx target;
3020 in_check_memory_usage = 1;
3021 target = copy_to_reg (temp);
3022 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3023 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3024 target, Pmode,
3025 XEXP (xinner, 0), Pmode,
3026 size, TYPE_MODE (sizetype));
3027 else
3028 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3029 target, Pmode,
3030 size, TYPE_MODE (sizetype),
3031 GEN_INT (MEMORY_USE_RW),
3032 TYPE_MODE (integer_type_node));
3033 in_check_memory_usage = 0;
3036 /* TEMP is the address of the block. Copy the data there. */
3037 if (GET_CODE (size) == CONST_INT
3038 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
3040 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3041 INTVAL (size), align);
3042 goto ret;
3044 else
3046 rtx opalign = GEN_INT (align);
3047 enum machine_mode mode;
3048 rtx target = gen_rtx_MEM (BLKmode, temp);
3050 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3051 mode != VOIDmode;
3052 mode = GET_MODE_WIDER_MODE (mode))
3054 enum insn_code code = movstr_optab[(int) mode];
3055 insn_operand_predicate_fn pred;
3057 if (code != CODE_FOR_nothing
3058 && ((GET_CODE (size) == CONST_INT
3059 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3060 <= (GET_MODE_MASK (mode) >> 1)))
3061 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3062 && (!(pred = insn_data[(int) code].operand[0].predicate)
3063 || ((*pred) (target, BLKmode)))
3064 && (!(pred = insn_data[(int) code].operand[1].predicate)
3065 || ((*pred) (xinner, BLKmode)))
3066 && (!(pred = insn_data[(int) code].operand[3].predicate)
3067 || ((*pred) (opalign, VOIDmode))))
3069 rtx op2 = convert_to_mode (mode, size, 1);
3070 rtx last = get_last_insn ();
3071 rtx pat;
3073 pred = insn_data[(int) code].operand[2].predicate;
3074 if (pred != 0 && ! (*pred) (op2, mode))
3075 op2 = copy_to_mode_reg (mode, op2);
3077 pat = GEN_FCN ((int) code) (target, xinner,
3078 op2, opalign);
3079 if (pat)
3081 emit_insn (pat);
3082 goto ret;
3084 else
3085 delete_insns_since (last);
3090 #ifndef ACCUMULATE_OUTGOING_ARGS
3091 /* If the source is referenced relative to the stack pointer,
3092 copy it to another register to stabilize it. We do not need
3093 to do this if we know that we won't be changing sp. */
3095 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3096 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3097 temp = copy_to_reg (temp);
3098 #endif
3100 /* Make inhibit_defer_pop nonzero around the library call
3101 to force it to pop the bcopy-arguments right away. */
3102 NO_DEFER_POP;
3103 #ifdef TARGET_MEM_FUNCTIONS
3104 emit_library_call (memcpy_libfunc, 0,
3105 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3106 convert_to_mode (TYPE_MODE (sizetype),
3107 size, TREE_UNSIGNED (sizetype)),
3108 TYPE_MODE (sizetype));
3109 #else
3110 emit_library_call (bcopy_libfunc, 0,
3111 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3112 convert_to_mode (TYPE_MODE (integer_type_node),
3113 size,
3114 TREE_UNSIGNED (integer_type_node)),
3115 TYPE_MODE (integer_type_node));
3116 #endif
3117 OK_DEFER_POP;
3120 else if (partial > 0)
3122 /* Scalar partly in registers. */
3124 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3125 int i;
3126 int not_stack;
3127 /* # words of start of argument
3128 that we must make space for but need not store. */
3129 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3130 int args_offset = INTVAL (args_so_far);
3131 int skip;
3133 /* Push padding now if padding above and stack grows down,
3134 or if padding below and stack grows up.
3135 But if space already allocated, this has already been done. */
3136 if (extra && args_addr == 0
3137 && where_pad != none && where_pad != stack_direction)
3138 anti_adjust_stack (GEN_INT (extra));
3140 /* If we make space by pushing it, we might as well push
3141 the real data. Otherwise, we can leave OFFSET nonzero
3142 and leave the space uninitialized. */
3143 if (args_addr == 0)
3144 offset = 0;
3146 /* Now NOT_STACK gets the number of words that we don't need to
3147 allocate on the stack. */
3148 not_stack = partial - offset;
3150 /* If the partial register-part of the arg counts in its stack size,
3151 skip the part of stack space corresponding to the registers.
3152 Otherwise, start copying to the beginning of the stack space,
3153 by setting SKIP to 0. */
3154 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3156 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3157 x = validize_mem (force_const_mem (mode, x));
3159 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3160 SUBREGs of such registers are not allowed. */
3161 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3162 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3163 x = copy_to_reg (x);
3165 /* Loop over all the words allocated on the stack for this arg. */
3166 /* We can do it by words, because any scalar bigger than a word
3167 has a size that is a multiple of a word. */
3168 #ifndef PUSH_ARGS_REVERSED
3169 for (i = not_stack; i < size; i++)
3170 #else
3171 for (i = size - 1; i >= not_stack; i--)
3172 #endif
3173 if (i >= not_stack + offset)
3174 emit_push_insn (operand_subword_force (x, i, mode),
3175 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3176 0, args_addr,
3177 GEN_INT (args_offset + ((i - not_stack + skip)
3178 * UNITS_PER_WORD)),
3179 reg_parm_stack_space);
3181 else
3183 rtx addr;
3184 rtx target = NULL_RTX;
3186 /* Push padding now if padding above and stack grows down,
3187 or if padding below and stack grows up.
3188 But if space already allocated, this has already been done. */
3189 if (extra && args_addr == 0
3190 && where_pad != none && where_pad != stack_direction)
3191 anti_adjust_stack (GEN_INT (extra));
3193 #ifdef PUSH_ROUNDING
3194 if (args_addr == 0)
3195 addr = gen_push_operand ();
3196 else
3197 #endif
3199 if (GET_CODE (args_so_far) == CONST_INT)
3200 addr
3201 = memory_address (mode,
3202 plus_constant (args_addr,
3203 INTVAL (args_so_far)));
3204 else
3205 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3206 args_so_far));
3207 target = addr;
3210 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3212 if (current_function_check_memory_usage && ! in_check_memory_usage)
3214 in_check_memory_usage = 1;
3215 if (target == 0)
3216 target = get_push_address (GET_MODE_SIZE (mode));
3218 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3219 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3220 target, Pmode,
3221 XEXP (x, 0), Pmode,
3222 GEN_INT (GET_MODE_SIZE (mode)),
3223 TYPE_MODE (sizetype));
3224 else
3225 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3226 target, Pmode,
3227 GEN_INT (GET_MODE_SIZE (mode)),
3228 TYPE_MODE (sizetype),
3229 GEN_INT (MEMORY_USE_RW),
3230 TYPE_MODE (integer_type_node));
3231 in_check_memory_usage = 0;
3235 ret:
3236 /* If part should go in registers, copy that part
3237 into the appropriate registers. Do this now, at the end,
3238 since mem-to-mem copies above may do function calls. */
3239 if (partial > 0 && reg != 0)
3241 /* Handle calls that pass values in multiple non-contiguous locations.
3242 The Irix 6 ABI has examples of this. */
3243 if (GET_CODE (reg) == PARALLEL)
3244 emit_group_load (reg, x, -1, align); /* ??? size? */
3245 else
3246 move_block_to_reg (REGNO (reg), x, partial, mode);
3249 if (extra && args_addr == 0 && where_pad == stack_direction)
3250 anti_adjust_stack (GEN_INT (extra));
3253 /* Expand an assignment that stores the value of FROM into TO.
3254 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3255 (This may contain a QUEUED rtx;
3256 if the value is constant, this rtx is a constant.)
3257 Otherwise, the returned value is NULL_RTX.
3259 SUGGEST_REG is no longer actually used.
3260 It used to mean, copy the value through a register
3261 and return that register, if that is possible.
3262 We now use WANT_VALUE to decide whether to do this. */
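/* Several special cases are peeled off below before the generic
   "expand TO, then store_expr FROM into it" path: component,
   bit-field and array references go through get_inner_reference and
   store_field; a non-aggregate CALL_EXPR on the right-hand side is
   expanded before the left-hand side is touched; stores into a
   RESULT_DECL register go through a temporary; and a structure-return
   copy through a pointer that may overlap the destination is done
   with memcpy/bcopy.  */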
3265 expand_assignment (to, from, want_value, suggest_reg)
3266 tree to, from;
3267 int want_value;
3268 int suggest_reg ATTRIBUTE_UNUSED;
3270 register rtx to_rtx = 0;
3271 rtx result;
3273 /* Don't crash if the lhs of the assignment was erroneous. */
3275 if (TREE_CODE (to) == ERROR_MARK)
3277 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3278 return want_value ? result : NULL_RTX;
3281 /* Assignment of a structure component needs special treatment
3282 if the structure component's rtx is not simply a MEM.
3283 Assignment of an array element at a constant index, and assignment of
3284 an array element in an unaligned packed structure field, has the same
3285 problem. */
3287 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3288 || TREE_CODE (to) == ARRAY_REF)
3290 enum machine_mode mode1;
3291 int bitsize;
3292 int bitpos;
3293 tree offset;
3294 int unsignedp;
3295 int volatilep = 0;
3296 tree tem;
3297 int alignment;
3299 push_temp_slots ();
3300 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3301 &unsignedp, &volatilep, &alignment);
3303 /* If we are going to use store_bit_field and extract_bit_field,
3304 make sure to_rtx will be safe for multiple use. */
3306 if (mode1 == VOIDmode && want_value)
3307 tem = stabilize_reference (tem);
3309 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3310 if (offset != 0)
3312 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3314 if (GET_CODE (to_rtx) != MEM)
3315 abort ();
3317 if (GET_MODE (offset_rtx) != ptr_mode)
3319 #ifdef POINTERS_EXTEND_UNSIGNED
3320 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3321 #else
3322 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3323 #endif
3326 /* A constant address in TO_RTX can have VOIDmode, we must not try
3327 to call force_reg for that case. Avoid that case. */
3328 if (GET_CODE (to_rtx) == MEM
3329 && GET_MODE (to_rtx) == BLKmode
3330 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3331 && bitsize
3332 && (bitpos % bitsize) == 0
3333 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3334 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3336 rtx temp = change_address (to_rtx, mode1,
3337 plus_constant (XEXP (to_rtx, 0),
3338 (bitpos /
3339 BITS_PER_UNIT)));
3340 if (GET_CODE (XEXP (temp, 0)) == REG)
3341 to_rtx = temp;
3342 else
3343 to_rtx = change_address (to_rtx, mode1,
3344 force_reg (GET_MODE (XEXP (temp, 0)),
3345 XEXP (temp, 0)));
3346 bitpos = 0;
3349 to_rtx = change_address (to_rtx, VOIDmode,
3350 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3351 force_reg (ptr_mode,
3352 offset_rtx)));
3355 if (volatilep)
3357 if (GET_CODE (to_rtx) == MEM)
3359 /* When the offset is zero, to_rtx is the address of the
3360 structure we are storing into, and hence may be shared.
3361 We must make a new MEM before setting the volatile bit. */
3362 if (offset == 0)
3363 to_rtx = copy_rtx (to_rtx);
3365 MEM_VOLATILE_P (to_rtx) = 1;
3367 #if 0 /* This was turned off because, when a field is volatile
3368 in an object which is not volatile, the object may be in a register,
3369 and then we would abort over here. */
3370 else
3371 abort ();
3372 #endif
3375 if (TREE_CODE (to) == COMPONENT_REF
3376 && TREE_READONLY (TREE_OPERAND (to, 1)))
3378 if (offset == 0)
3379 to_rtx = copy_rtx (to_rtx);
3381 RTX_UNCHANGING_P (to_rtx) = 1;
3384 /* Check the access. */
3385 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3387 rtx to_addr;
3388 int size;
3389 int best_mode_size;
3390 enum machine_mode best_mode;
3392 best_mode = get_best_mode (bitsize, bitpos,
3393 TYPE_ALIGN (TREE_TYPE (tem)),
3394 mode1, volatilep);
3395 if (best_mode == VOIDmode)
3396 best_mode = QImode;
3398 best_mode_size = GET_MODE_BITSIZE (best_mode);
3399 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3400 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3401 size *= GET_MODE_SIZE (best_mode);
3403 /* Check the access right of the pointer. */
3404 if (size)
3405 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3406 to_addr, Pmode,
3407 GEN_INT (size), TYPE_MODE (sizetype),
3408 GEN_INT (MEMORY_USE_WO),
3409 TYPE_MODE (integer_type_node));
3412 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3413 (want_value
3414 /* Spurious cast makes HPUX compiler happy. */
3415 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3416 : VOIDmode),
3417 unsignedp,
3418 /* Required alignment of containing datum. */
3419 alignment,
3420 int_size_in_bytes (TREE_TYPE (tem)),
3421 get_alias_set (to));
3422 preserve_temp_slots (result);
3423 free_temp_slots ();
3424 pop_temp_slots ();
3426 /* If the value is meaningful, convert RESULT to the proper mode.
3427 Otherwise, return nothing. */
3428 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3429 TYPE_MODE (TREE_TYPE (from)),
3430 result,
3431 TREE_UNSIGNED (TREE_TYPE (to)))
3432 : NULL_RTX);
3435 /* If the rhs is a function call and its value is not an aggregate,
3436 call the function before we start to compute the lhs.
3437 This is needed for correct code for cases such as
3438 val = setjmp (buf) on machines where reference to val
3439 requires loading up part of an address in a separate insn.
3441 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3442 a promoted variable where the zero- or sign- extension needs to be done.
3443 Handling this in the normal way is safe because no computation is done
3444 before the call. */
3445 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3446 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3447 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3449 rtx value;
3451 push_temp_slots ();
3452 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3453 if (to_rtx == 0)
3454 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3456 /* Handle calls that return values in multiple non-contiguous locations.
3457 The Irix 6 ABI has examples of this. */
3458 if (GET_CODE (to_rtx) == PARALLEL)
3459 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3460 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3461 else if (GET_MODE (to_rtx) == BLKmode)
3462 emit_block_move (to_rtx, value, expr_size (from),
3463 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3464 else
3466 #ifdef POINTERS_EXTEND_UNSIGNED
3467 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3468 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3469 value = convert_memory_address (GET_MODE (to_rtx), value);
3470 #endif
3471 emit_move_insn (to_rtx, value);
3473 preserve_temp_slots (to_rtx);
3474 free_temp_slots ();
3475 pop_temp_slots ();
3476 return want_value ? to_rtx : NULL_RTX;
3479 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3480 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3482 if (to_rtx == 0)
3484 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3485 if (GET_CODE (to_rtx) == MEM)
3486 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3489 /* Don't move directly into a return register. */
3490 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3492 rtx temp;
3494 push_temp_slots ();
3495 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3496 emit_move_insn (to_rtx, temp);
3497 preserve_temp_slots (to_rtx);
3498 free_temp_slots ();
3499 pop_temp_slots ();
3500 return want_value ? to_rtx : NULL_RTX;
3503 /* In case we are returning the contents of an object which overlaps
3504 the place the value is being stored, use a safe function when copying
3505 a value through a pointer into a structure value return block. */
3506 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3507 && current_function_returns_struct
3508 && !current_function_returns_pcc_struct)
3510 rtx from_rtx, size;
3512 push_temp_slots ();
3513 size = expr_size (from);
3514 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3515 EXPAND_MEMORY_USE_DONT);
3517 /* Copy the rights of the bitmap. */
3518 if (current_function_check_memory_usage)
3519 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3520 XEXP (to_rtx, 0), Pmode,
3521 XEXP (from_rtx, 0), Pmode,
3522 convert_to_mode (TYPE_MODE (sizetype),
3523 size, TREE_UNSIGNED (sizetype)),
3524 TYPE_MODE (sizetype));
3526 #ifdef TARGET_MEM_FUNCTIONS
3527 emit_library_call (memcpy_libfunc, 0,
3528 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3529 XEXP (from_rtx, 0), Pmode,
3530 convert_to_mode (TYPE_MODE (sizetype),
3531 size, TREE_UNSIGNED (sizetype)),
3532 TYPE_MODE (sizetype));
3533 #else
3534 emit_library_call (bcopy_libfunc, 0,
3535 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3536 XEXP (to_rtx, 0), Pmode,
3537 convert_to_mode (TYPE_MODE (integer_type_node),
3538 size, TREE_UNSIGNED (integer_type_node)),
3539 TYPE_MODE (integer_type_node));
3540 #endif
3542 preserve_temp_slots (to_rtx);
3543 free_temp_slots ();
3544 pop_temp_slots ();
3545 return want_value ? to_rtx : NULL_RTX;
3548 /* Compute FROM and store the value in the rtx we got. */
3550 push_temp_slots ();
3551 result = store_expr (from, to_rtx, want_value);
3552 preserve_temp_slots (result);
3553 free_temp_slots ();
3554 pop_temp_slots ();
3555 return want_value ? result : NULL_RTX;
3558 /* Generate code for computing expression EXP,
3559 and storing the value into TARGET.
3560 TARGET may contain a QUEUED rtx.
3562 If WANT_VALUE is nonzero, return a copy of the value
3563 not in TARGET, so that we can be sure to use the proper
3564 value in a containing expression even if TARGET has something
3565 else stored in it. If possible, we copy the value through a pseudo
3566 and return that pseudo. Or, if the value is constant, we try to
3567 return the constant. In some cases, we return a pseudo
3568 copied *from* TARGET.
3570 If the mode is BLKmode then we may return TARGET itself.
3571 It turns out that in BLKmode it doesn't cause a problem,
3572 because C has no operators that could combine two different
3573 assignments into the same BLKmode object with different values
3574 with no sequence point. Will other languages need this to
3575 be more thorough?
3577 If WANT_VALUE is 0, we return NULL, to make sure
3578 to catch quickly any cases where the caller uses the value
3579 and fails to set WANT_VALUE. */
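/* A rough map of the branches below: a COMPOUND_EXPR expands its
   first operand and recurses on the second; a BLKmode COND_EXPR
   stores into TARGET on each arm under its own labels; a TARGET
   containing a queued postincrement is never used as the expansion
   target; a promoted SUBREG target has EXP computed in the declared
   mode and then converted; otherwise EXP is expanded (with TARGET as
   a hint) and, if the result did not land in TARGET, moved or
   block-moved there, with string constants getting a copy-then-clear
   sequence.  */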
3582 store_expr (exp, target, want_value)
3583 register tree exp;
3584 register rtx target;
3585 int want_value;
3587 register rtx temp;
3588 int dont_return_target = 0;
3590 if (TREE_CODE (exp) == COMPOUND_EXPR)
3592 /* Perform first part of compound expression, then assign from second
3593 part. */
3594 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3595 emit_queue ();
3596 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3598 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3600 /* For conditional expression, get safe form of the target. Then
3601 test the condition, doing the appropriate assignment on either
3602 side. This avoids the creation of unnecessary temporaries.
3603 For non-BLKmode, it is more efficient not to do this. */
3605 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3607 emit_queue ();
3608 target = protect_from_queue (target, 1);
3610 do_pending_stack_adjust ();
3611 NO_DEFER_POP;
3612 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3613 start_cleanup_deferral ();
3614 store_expr (TREE_OPERAND (exp, 1), target, 0);
3615 end_cleanup_deferral ();
3616 emit_queue ();
3617 emit_jump_insn (gen_jump (lab2));
3618 emit_barrier ();
3619 emit_label (lab1);
3620 start_cleanup_deferral ();
3621 store_expr (TREE_OPERAND (exp, 2), target, 0);
3622 end_cleanup_deferral ();
3623 emit_queue ();
3624 emit_label (lab2);
3625 OK_DEFER_POP;
3627 return want_value ? target : NULL_RTX;
3629 else if (queued_subexp_p (target))
3630 /* If target contains a postincrement, let's not risk
3631 using it as the place to generate the rhs. */
3633 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3635 /* Expand EXP into a new pseudo. */
3636 temp = gen_reg_rtx (GET_MODE (target));
3637 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3639 else
3640 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3642 /* If target is volatile, ANSI requires accessing the value
3643 *from* the target, if it is accessed. So make that happen.
3644 In no case return the target itself. */
3645 if (! MEM_VOLATILE_P (target) && want_value)
3646 dont_return_target = 1;
3648 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3649 && GET_MODE (target) != BLKmode)
3650 /* If target is in memory and caller wants value in a register instead,
3651 arrange that. Pass TARGET as target for expand_expr so that,
3652 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3653 We know expand_expr will not use the target in that case.
3654 Don't do this if TARGET is volatile because we are supposed
3655 to write it and then read it. */
3657 temp = expand_expr (exp, target, GET_MODE (target), 0);
3658 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3659 temp = copy_to_reg (temp);
3660 dont_return_target = 1;
3662 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3663 /* If this is a scalar in a register that is stored in a wider mode
3664 than the declared mode, compute the result into its declared mode
3665 and then convert to the wider mode. Our value is the computed
3666 expression. */
3668 /* If we don't want a value, we can do the conversion inside EXP,
3669 which will often result in some optimizations. Do the conversion
3670 in two steps: first change the signedness, if needed, then
3671 the extend. But don't do this if the type of EXP is a subtype
3672 of something else since then the conversion might involve
3673 more than just converting modes. */
3674 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3675 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3677 if (TREE_UNSIGNED (TREE_TYPE (exp))
3678 != SUBREG_PROMOTED_UNSIGNED_P (target))
3679 exp
3680 = convert
3681 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3682 TREE_TYPE (exp)),
3683 exp);
3685 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3686 SUBREG_PROMOTED_UNSIGNED_P (target)),
3687 exp);
3690 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3692 /* If TEMP is a volatile MEM and we want a result value, make
3693 the access now so it gets done only once. Likewise if
3694 it contains TARGET. */
3695 if (GET_CODE (temp) == MEM && want_value
3696 && (MEM_VOLATILE_P (temp)
3697 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3698 temp = copy_to_reg (temp);
3700 /* If TEMP is a VOIDmode constant, use convert_modes to make
3701 sure that we properly convert it. */
3702 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3703 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3704 TYPE_MODE (TREE_TYPE (exp)), temp,
3705 SUBREG_PROMOTED_UNSIGNED_P (target));
3707 convert_move (SUBREG_REG (target), temp,
3708 SUBREG_PROMOTED_UNSIGNED_P (target));
3710 /* If we promoted a constant, change the mode back down to match
3711 target. Otherwise, the caller might get confused by a result whose
3712 mode is larger than expected. */
3714 if (want_value && GET_MODE (temp) != GET_MODE (target)
3715 && GET_MODE (temp) != VOIDmode)
3717 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3718 SUBREG_PROMOTED_VAR_P (temp) = 1;
3719 SUBREG_PROMOTED_UNSIGNED_P (temp)
3720 = SUBREG_PROMOTED_UNSIGNED_P (target);
3723 return want_value ? temp : NULL_RTX;
3725 else
3727 temp = expand_expr (exp, target, GET_MODE (target), 0);
3728 /* Return TARGET if it's a specified hardware register.
3729 If TARGET is a volatile mem ref, either return TARGET
3730 or return a reg copied *from* TARGET; ANSI requires this.
3732 Otherwise, if TEMP is not TARGET, return TEMP
3733 if it is constant (for efficiency),
3734 or if we really want the correct value. */
3735 if (!(target && GET_CODE (target) == REG
3736 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3737 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3738 && ! rtx_equal_p (temp, target)
3739 && (CONSTANT_P (temp) || want_value))
3740 dont_return_target = 1;
3743 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3744 the same as that of TARGET, adjust the constant. This is needed, for
3745 example, in case it is a CONST_DOUBLE and we want only a word-sized
3746 value. */
3747 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3748 && TREE_CODE (exp) != ERROR_MARK
3749 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3750 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3751 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3753 if (current_function_check_memory_usage
3754 && GET_CODE (target) == MEM
3755 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3757 if (GET_CODE (temp) == MEM)
3758 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3759 XEXP (target, 0), Pmode,
3760 XEXP (temp, 0), Pmode,
3761 expr_size (exp), TYPE_MODE (sizetype));
3762 else
3763 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3764 XEXP (target, 0), Pmode,
3765 expr_size (exp), TYPE_MODE (sizetype),
3766 GEN_INT (MEMORY_USE_WO),
3767 TYPE_MODE (integer_type_node));
3770 /* If value was not generated in the target, store it there.
3771 Convert the value to TARGET's type first if necessary. */
3772 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3773 one or both of them are volatile memory refs, we have to distinguish
3774 two cases:
3775 - expand_expr has used TARGET. In this case, we must not generate
3776 another copy. This can be detected by TARGET being equal according
3777 to == .
3778 - expand_expr has not used TARGET - that means that the source just
3779 happens to have the same RTX form. Since temp will have been created
3780 by expand_expr, it will compare unequal according to == .
3781 We must generate a copy in this case, to reach the correct number
3782 of volatile memory references. */
3784 if ((! rtx_equal_p (temp, target)
3785 || (temp != target && (side_effects_p (temp)
3786 || side_effects_p (target))))
3787 && TREE_CODE (exp) != ERROR_MARK)
3789 target = protect_from_queue (target, 1);
3790 if (GET_MODE (temp) != GET_MODE (target)
3791 && GET_MODE (temp) != VOIDmode)
3793 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3794 if (dont_return_target)
3796 /* In this case, we will return TEMP,
3797 so make sure it has the proper mode.
3798 But don't forget to store the value into TARGET. */
3799 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3800 emit_move_insn (target, temp);
3802 else
3803 convert_move (target, temp, unsignedp);
3806 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3808 /* Handle copying a string constant into an array.
3809 The string constant may be shorter than the array.
3810 So copy just the string's actual length, and clear the rest. */
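 /* For instance, for a hypothetical declaration such as
    char buf[8] = "abc";
    the bytes of the STRING_CST are block-copied into BUF and whatever
    is left of BUF beyond the string is then cleared (via the
    memset/bzero calls below), exactly as described above.  */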
3811 rtx size;
3812 rtx addr;
3814 /* Get the size of the data type of the string,
3815 which is actually the size of the target. */
3816 size = expr_size (exp);
3817 if (GET_CODE (size) == CONST_INT
3818 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3819 emit_block_move (target, temp, size,
3820 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3821 else
3823 /* Compute the size of the data to copy from the string. */
3824 tree copy_size
3825 = size_binop (MIN_EXPR,
3826 make_tree (sizetype, size),
3827 convert (sizetype,
3828 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3829 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3830 VOIDmode, 0);
3831 rtx label = 0;
3833 /* Copy that much. */
3834 emit_block_move (target, temp, copy_size_rtx,
3835 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3837 /* Figure out how much is left in TARGET that we have to clear.
3838 Do all calculations in ptr_mode. */
3840 addr = XEXP (target, 0);
3841 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3843 if (GET_CODE (copy_size_rtx) == CONST_INT)
3845 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3846 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3848 else
3850 addr = force_reg (ptr_mode, addr);
3851 addr = expand_binop (ptr_mode, add_optab, addr,
3852 copy_size_rtx, NULL_RTX, 0,
3853 OPTAB_LIB_WIDEN);
3855 size = expand_binop (ptr_mode, sub_optab, size,
3856 copy_size_rtx, NULL_RTX, 0,
3857 OPTAB_LIB_WIDEN);
3859 label = gen_label_rtx ();
3860 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3861 GET_MODE (size), 0, 0, label);
3864 if (size != const0_rtx)
3866 /* Be sure we can write on ADDR. */
3867 if (current_function_check_memory_usage)
3868 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3869 addr, Pmode,
3870 size, TYPE_MODE (sizetype),
3871 GEN_INT (MEMORY_USE_WO),
3872 TYPE_MODE (integer_type_node));
3873 #ifdef TARGET_MEM_FUNCTIONS
3874 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3875 addr, ptr_mode,
3876 const0_rtx, TYPE_MODE (integer_type_node),
3877 convert_to_mode (TYPE_MODE (sizetype),
3878 size,
3879 TREE_UNSIGNED (sizetype)),
3880 TYPE_MODE (sizetype));
3881 #else
3882 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3883 addr, ptr_mode,
3884 convert_to_mode (TYPE_MODE (integer_type_node),
3885 size,
3886 TREE_UNSIGNED (integer_type_node)),
3887 TYPE_MODE (integer_type_node));
3888 #endif
3891 if (label)
3892 emit_label (label);
3895 /* Handle calls that return values in multiple non-contiguous locations.
3896 The Irix 6 ABI has examples of this. */
3897 else if (GET_CODE (target) == PARALLEL)
3898 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3899 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3900 else if (GET_MODE (temp) == BLKmode)
3901 emit_block_move (target, temp, expr_size (exp),
3902 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3903 else
3904 emit_move_insn (target, temp);
3907 /* If we don't want a value, return NULL_RTX. */
3908 if (! want_value)
3909 return NULL_RTX;
3911 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3912 ??? The latter test doesn't seem to make sense. */
3913 else if (dont_return_target && GET_CODE (temp) != MEM)
3914 return temp;
3916 /* Return TARGET itself if it is a hard register. */
3917 else if (want_value && GET_MODE (target) != BLKmode
3918 && ! (GET_CODE (target) == REG
3919 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3920 return copy_to_reg (target);
3922 else
3923 return target;
3926 /* Return 1 if EXP just contains zeros. */
3928 static int
3929 is_zeros_p (exp)
3930 tree exp;
3932 tree elt;
3934 switch (TREE_CODE (exp))
3936 case CONVERT_EXPR:
3937 case NOP_EXPR:
3938 case NON_LVALUE_EXPR:
3939 return is_zeros_p (TREE_OPERAND (exp, 0));
3941 case INTEGER_CST:
3942 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3944 case COMPLEX_CST:
3945 return
3946 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3948 case REAL_CST:
3949 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3951 case CONSTRUCTOR:
3952 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3953 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3954 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3955 if (! is_zeros_p (TREE_VALUE (elt)))
3956 return 0;
3958 return 1;
3960 default:
3961 return 0;
3965 /* Return 1 if EXP contains mostly (3/4) zeros. */
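 /* For instance, a CONSTRUCTOR with 16 elements of which 12 are zero
    satisfies 4 * 12 >= 3 * 16, so it counts as mostly zero; with only
    11 zero elements (44 < 48) it does not.  */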
3967 static int
3968 mostly_zeros_p (exp)
3969 tree exp;
3971 if (TREE_CODE (exp) == CONSTRUCTOR)
3973 int elts = 0, zeros = 0;
3974 tree elt = CONSTRUCTOR_ELTS (exp);
3975 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3977 /* If there are no ranges of true bits, it is all zero. */
3978 return elt == NULL_TREE;
3980 for (; elt; elt = TREE_CHAIN (elt))
3982 /* We do not handle the case where the index is a RANGE_EXPR,
3983 so the statistic will be somewhat inaccurate.
3984 We do make a more accurate count in store_constructor itself,
3985 so, since this function is only used for nested array elements,
3986 this should be close enough. */
3987 if (mostly_zeros_p (TREE_VALUE (elt)))
3988 zeros++;
3989 elts++;
3992 return 4 * zeros >= 3 * elts;
3995 return is_zeros_p (exp);
3998 /* Helper function for store_constructor.
3999 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4000 TYPE is the type of the CONSTRUCTOR, not the element type.
4001 ALIGN and CLEARED are as for store_constructor.
4003 This provides a recursive shortcut back to store_constructor when it isn't
4004 necessary to go through store_field. This is so that we can pass through
4005 the cleared field to let store_constructor know that we may not have to
4006 clear a substructure if the outer structure has already been cleared. */
4008 static void
4009 store_constructor_field (target, bitsize, bitpos,
4010 mode, exp, type, align, cleared)
4011 rtx target;
4012 int bitsize, bitpos;
4013 enum machine_mode mode;
4014 tree exp, type;
4015 int align;
4016 int cleared;
4018 if (TREE_CODE (exp) == CONSTRUCTOR
4019 && bitpos % BITS_PER_UNIT == 0
4020 /* If we have a non-zero bitpos for a register target, then we just
4021 let store_field do the bitfield handling. This is unlikely to
4022 generate unnecessary clear instructions anyway. */
4023 && (bitpos == 0 || GET_CODE (target) == MEM))
4025 if (bitpos != 0)
4026 target = change_address (target, VOIDmode,
4027 plus_constant (XEXP (target, 0),
4028 bitpos / BITS_PER_UNIT));
4029 store_constructor (exp, target, align, cleared);
4031 else
4032 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4033 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4034 int_size_in_bytes (type), cleared);
4037 /* Store the value of constructor EXP into the rtx TARGET.
4038 TARGET is either a REG or a MEM.
4039 ALIGN is the maximum known alignment for TARGET, in bits.
4040 CLEARED is true if TARGET is known to have been zero'd. */
4042 static void
4043 store_constructor (exp, target, align, cleared)
4044 tree exp;
4045 rtx target;
4046 int align;
4047 int cleared;
4049 tree type = TREE_TYPE (exp);
4050 #ifdef WORD_REGISTER_OPERATIONS
4051 rtx exp_size = expr_size (exp);
4052 #endif
4054 /* We know our target cannot conflict, since safe_from_p has been called. */
4055 #if 0
4056 /* Don't try copying piece by piece into a hard register
4057 since that is vulnerable to being clobbered by EXP.
4058 Instead, construct in a pseudo register and then copy it all. */
4059 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4061 rtx temp = gen_reg_rtx (GET_MODE (target));
4062 store_constructor (exp, temp, 0);
4063 emit_move_insn (target, temp);
4064 return;
4066 #endif
4068 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4069 || TREE_CODE (type) == QUAL_UNION_TYPE)
4071 register tree elt;
4073 /* Inform later passes that the whole union value is dead. */
4074 if (TREE_CODE (type) == UNION_TYPE
4075 || TREE_CODE (type) == QUAL_UNION_TYPE)
4077 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4079 /* If the constructor is empty, clear the union. */
4080 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4081 clear_storage (target, expr_size (exp),
4082 TYPE_ALIGN (type) / BITS_PER_UNIT);
4085 /* If we are building a static constructor into a register,
4086 set the initial value as zero so we can fold the value into
4087 a constant. But if more than one register is involved,
4088 this probably loses. */
4089 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4090 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4092 if (! cleared)
4093 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4095 cleared = 1;
4098 /* If the constructor has fewer fields than the structure
4099 or if we are initializing the structure to mostly zeros,
4100 clear the whole structure first. */
4101 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4102 != list_length (TYPE_FIELDS (type)))
4103 || mostly_zeros_p (exp))
4105 if (! cleared)
4106 clear_storage (target, expr_size (exp),
4107 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4109 cleared = 1;
4111 else
4112 /* Inform later passes that the old value is dead. */
4113 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4115 /* Store each element of the constructor into
4116 the corresponding field of TARGET. */
4118 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4120 register tree field = TREE_PURPOSE (elt);
4121 #ifdef WORD_REGISTER_OPERATIONS
4122 tree value = TREE_VALUE (elt);
4123 #endif
4124 register enum machine_mode mode;
4125 int bitsize;
4126 int bitpos = 0;
4127 int unsignedp;
4128 tree pos, constant = 0, offset = 0;
4129 rtx to_rtx = target;
4131 /* Just ignore missing fields.
4132 We cleared the whole structure, above,
4133 if any fields are missing. */
4134 if (field == 0)
4135 continue;
4137 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4138 continue;
4140 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4141 unsignedp = TREE_UNSIGNED (field);
4142 mode = DECL_MODE (field);
4143 if (DECL_BIT_FIELD (field))
4144 mode = VOIDmode;
4146 pos = DECL_FIELD_BITPOS (field);
4147 if (TREE_CODE (pos) == INTEGER_CST)
4148 constant = pos;
4149 else if (TREE_CODE (pos) == PLUS_EXPR
4150 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4151 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4152 else
4153 offset = pos;
4155 if (constant)
4156 bitpos = TREE_INT_CST_LOW (constant);
4158 if (offset)
4160 rtx offset_rtx;
4162 if (contains_placeholder_p (offset))
4163 offset = build (WITH_RECORD_EXPR, sizetype,
4164 offset, make_tree (TREE_TYPE (exp), target));
4166 offset = size_binop (FLOOR_DIV_EXPR, offset,
4167 size_int (BITS_PER_UNIT));
4169 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4170 if (GET_CODE (to_rtx) != MEM)
4171 abort ();
4173 if (GET_MODE (offset_rtx) != ptr_mode)
4175 #ifdef POINTERS_EXTEND_UNSIGNED
4176 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4177 #else
4178 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4179 #endif
4182 to_rtx
4183 = change_address (to_rtx, VOIDmode,
4184 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4185 force_reg (ptr_mode,
4186 offset_rtx)));
4189 if (TREE_READONLY (field))
4191 if (GET_CODE (to_rtx) == MEM)
4192 to_rtx = copy_rtx (to_rtx);
4194 RTX_UNCHANGING_P (to_rtx) = 1;
4197 #ifdef WORD_REGISTER_OPERATIONS
4198 /* If this initializes a field that is smaller than a word, at the
4199 start of a word, try to widen it to a full word.
4200 This special case allows us to output C++ member function
4201 initializations in a form that the optimizers can understand. */
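 /* For instance, with 32-bit words, storing the constant 5 into a
    16-bit field that starts a word of a structure held in a register
    is widened below to a full word_mode store; on a big-endian target
    the value is first shifted left by 32 - 16 = 16 bits so that it
    lands in the field's half of the word.  */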
4202 if (constant
4203 && GET_CODE (target) == REG
4204 && bitsize < BITS_PER_WORD
4205 && bitpos % BITS_PER_WORD == 0
4206 && GET_MODE_CLASS (mode) == MODE_INT
4207 && TREE_CODE (value) == INTEGER_CST
4208 && GET_CODE (exp_size) == CONST_INT
4209 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4211 tree type = TREE_TYPE (value);
4212 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4214 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4215 value = convert (type, value);
4217 if (BYTES_BIG_ENDIAN)
4218 value
4219 = fold (build (LSHIFT_EXPR, type, value,
4220 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4221 bitsize = BITS_PER_WORD;
4222 mode = word_mode;
4224 #endif
4225 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4226 TREE_VALUE (elt), type,
4227 MIN (align,
4228 DECL_ALIGN (TREE_PURPOSE (elt))),
4229 cleared);
4232 else if (TREE_CODE (type) == ARRAY_TYPE)
4234 register tree elt;
4235 register int i;
4236 int need_to_clear;
4237 tree domain = TYPE_DOMAIN (type);
4238 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4239 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4240 tree elttype = TREE_TYPE (type);
4242 /* If the constructor has fewer elements than the array,
4243 clear the whole array first. Similarly if this is
4244 a static constructor of a non-BLKmode object. */
4245 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4246 need_to_clear = 1;
4247 else
4249 HOST_WIDE_INT count = 0, zero_count = 0;
4250 need_to_clear = 0;
4251 /* This loop is a more accurate version of the loop in
4252 mostly_zeros_p (it handles RANGE_EXPR in an index).
4253 It is also needed to check for missing elements. */
4254 for (elt = CONSTRUCTOR_ELTS (exp);
4255 elt != NULL_TREE;
4256 elt = TREE_CHAIN (elt))
4258 tree index = TREE_PURPOSE (elt);
4259 HOST_WIDE_INT this_node_count;
4260 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4262 tree lo_index = TREE_OPERAND (index, 0);
4263 tree hi_index = TREE_OPERAND (index, 1);
4264 if (TREE_CODE (lo_index) != INTEGER_CST
4265 || TREE_CODE (hi_index) != INTEGER_CST)
4267 need_to_clear = 1;
4268 break;
4270 this_node_count = TREE_INT_CST_LOW (hi_index)
4271 - TREE_INT_CST_LOW (lo_index) + 1;
4273 else
4274 this_node_count = 1;
4275 count += this_node_count;
4276 if (mostly_zeros_p (TREE_VALUE (elt)))
4277 zero_count += this_node_count;
4279 /* Clear the entire array first if there are any missing elements,
4280 or if the incidence of zero elements is >= 75%. */
4281 if (count < maxelt - minelt + 1
4282 || 4 * zero_count >= 3 * count)
4283 need_to_clear = 1;
4285 if (need_to_clear)
4287 if (! cleared)
4288 clear_storage (target, expr_size (exp),
4289 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4290 cleared = 1;
4292 else
4293 /* Inform later passes that the old value is dead. */
4294 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4296 /* Store each element of the constructor into
4297 the corresponding element of TARGET, determined
4298 by counting the elements. */
4299 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4300 elt;
4301 elt = TREE_CHAIN (elt), i++)
4303 register enum machine_mode mode;
4304 int bitsize;
4305 int bitpos;
4306 int unsignedp;
4307 tree value = TREE_VALUE (elt);
4308 int align = TYPE_ALIGN (TREE_TYPE (value));
4309 tree index = TREE_PURPOSE (elt);
4310 rtx xtarget = target;
4312 if (cleared && is_zeros_p (value))
4313 continue;
4315 mode = TYPE_MODE (elttype);
4316 bitsize = GET_MODE_BITSIZE (mode);
4317 unsignedp = TREE_UNSIGNED (elttype);
4319 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4321 tree lo_index = TREE_OPERAND (index, 0);
4322 tree hi_index = TREE_OPERAND (index, 1);
4323 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4324 struct nesting *loop;
4325 HOST_WIDE_INT lo, hi, count;
4326 tree position;
4328 /* If the range is constant and "small", unroll the loop. */
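 /* For instance, a constant range such as [0 ... 7] over 4-byte
    elements covers 8 * 32 = 256 bits, which is within the 40 * 8 bit
    limit tested below, so it is simply unrolled into eight element
    stores; larger or non-constant ranges fall through to the
    run-time loop emitted in the else branch.  */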
4329 if (TREE_CODE (lo_index) == INTEGER_CST
4330 && TREE_CODE (hi_index) == INTEGER_CST
4331 && (lo = TREE_INT_CST_LOW (lo_index),
4332 hi = TREE_INT_CST_LOW (hi_index),
4333 count = hi - lo + 1,
4334 (GET_CODE (target) != MEM
4335 || count <= 2
4336 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4337 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4338 <= 40 * 8))))
4340 lo -= minelt; hi -= minelt;
4341 for (; lo <= hi; lo++)
4343 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4344 store_constructor_field (target, bitsize, bitpos, mode,
4345 value, type, align, cleared);
4348 else
4350 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4351 loop_top = gen_label_rtx ();
4352 loop_end = gen_label_rtx ();
4354 unsignedp = TREE_UNSIGNED (domain);
4356 index = build_decl (VAR_DECL, NULL_TREE, domain);
4358 DECL_RTL (index) = index_r
4359 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4360 &unsignedp, 0));
4362 if (TREE_CODE (value) == SAVE_EXPR
4363 && SAVE_EXPR_RTL (value) == 0)
4365 /* Make sure value gets expanded once before the
4366 loop. */
4367 expand_expr (value, const0_rtx, VOIDmode, 0);
4368 emit_queue ();
4370 store_expr (lo_index, index_r, 0);
4371 loop = expand_start_loop (0);
4373 /* Assign value to element index. */
4374 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4375 size_int (BITS_PER_UNIT));
4376 position = size_binop (MULT_EXPR,
4377 size_binop (MINUS_EXPR, index,
4378 TYPE_MIN_VALUE (domain)),
4379 position);
4380 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4381 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4382 xtarget = change_address (target, mode, addr);
4383 if (TREE_CODE (value) == CONSTRUCTOR)
4384 store_constructor (value, xtarget, align, cleared);
4385 else
4386 store_expr (value, xtarget, 0);
4388 expand_exit_loop_if_false (loop,
4389 build (LT_EXPR, integer_type_node,
4390 index, hi_index));
4392 expand_increment (build (PREINCREMENT_EXPR,
4393 TREE_TYPE (index),
4394 index, integer_one_node), 0, 0);
4395 expand_end_loop ();
4396 emit_label (loop_end);
4398 /* Needed by stupid register allocation, to extend the
4399 lifetime of pseudo-regs used by target past the end
4400 of the loop. */
4401 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4404 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4405 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4407 rtx pos_rtx, addr;
4408 tree position;
4410 if (index == 0)
4411 index = size_int (i);
4413 if (minelt)
4414 index = size_binop (MINUS_EXPR, index,
4415 TYPE_MIN_VALUE (domain));
4416 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4417 size_int (BITS_PER_UNIT));
4418 position = size_binop (MULT_EXPR, index, position);
4419 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4420 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4421 xtarget = change_address (target, mode, addr);
4422 store_expr (value, xtarget, 0);
4424 else
4426 if (index != 0)
4427 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4428 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4429 else
4430 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4431 store_constructor_field (target, bitsize, bitpos, mode, value,
4432 type, align, cleared);
4436 /* set constructor assignments */
4437 else if (TREE_CODE (type) == SET_TYPE)
4439 tree elt = CONSTRUCTOR_ELTS (exp);
4440 int nbytes = int_size_in_bytes (type), nbits;
4441 tree domain = TYPE_DOMAIN (type);
4442 tree domain_min, domain_max, bitlength;
4444 /* The default implementation strategy is to extract the constant
4445 parts of the constructor, use that to initialize the target,
4446 and then "or" in whatever non-constant ranges we need in addition.
4448 If a large set is all zero or all ones, it is
4449 probably better to set it using memset (if available) or bzero.
4450 Also, if a large set has just a single range, it may also be
4451 better to first clear the whole set (using
4452 bzero/memset), and then set the bits we want. */
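 /* As a concrete illustration: for a set over 0..63, the constant
    members listed in the constructor are assembled below word by word
    into HOST_WIDE_INT values and stored with ordinary move insns; any
    member given by a non-constant range is handled further down by
    calling memset (when the range is constant and byte-aligned) or
    the __setbits library routine.  */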
4454 /* Check for all zeros. */
4455 if (elt == NULL_TREE)
4457 if (!cleared)
4458 clear_storage (target, expr_size (exp),
4459 TYPE_ALIGN (type) / BITS_PER_UNIT);
4460 return;
4463 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4464 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4465 bitlength = size_binop (PLUS_EXPR,
4466 size_binop (MINUS_EXPR, domain_max, domain_min),
4467 size_one_node);
4469 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4470 abort ();
4471 nbits = TREE_INT_CST_LOW (bitlength);
4473 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4474 are "complicated" (more than one range), initialize (the
4475 constant parts) by copying from a constant. */
4476 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4477 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4479 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4480 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4481 char *bit_buffer = (char *) alloca (nbits);
4482 HOST_WIDE_INT word = 0;
4483 int bit_pos = 0;
4484 int ibit = 0;
4485 int offset = 0; /* In bytes from beginning of set. */
4486 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4487 for (;;)
4489 if (bit_buffer[ibit])
4491 if (BYTES_BIG_ENDIAN)
4492 word |= (1 << (set_word_size - 1 - bit_pos));
4493 else
4494 word |= 1 << bit_pos;
4496 bit_pos++; ibit++;
4497 if (bit_pos >= set_word_size || ibit == nbits)
4499 if (word != 0 || ! cleared)
4501 rtx datum = GEN_INT (word);
4502 rtx to_rtx;
4503 /* The assumption here is that it is safe to use
4504 XEXP if the set is multi-word, but not if
4505 it's single-word. */
4506 if (GET_CODE (target) == MEM)
4508 to_rtx = plus_constant (XEXP (target, 0), offset);
4509 to_rtx = change_address (target, mode, to_rtx);
4511 else if (offset == 0)
4512 to_rtx = target;
4513 else
4514 abort ();
4515 emit_move_insn (to_rtx, datum);
4517 if (ibit == nbits)
4518 break;
4519 word = 0;
4520 bit_pos = 0;
4521 offset += set_word_size / BITS_PER_UNIT;
4525 else if (!cleared)
4527 /* Don't bother clearing storage if the set is all ones. */
4528 if (TREE_CHAIN (elt) != NULL_TREE
4529 || (TREE_PURPOSE (elt) == NULL_TREE
4530 ? nbits != 1
4531 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4532 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4533 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4534 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4535 != nbits))))
4536 clear_storage (target, expr_size (exp),
4537 TYPE_ALIGN (type) / BITS_PER_UNIT);
4540 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4542 /* start of range of element or NULL */
4543 tree startbit = TREE_PURPOSE (elt);
4544 /* end of range of element, or element value */
4545 tree endbit = TREE_VALUE (elt);
4546 #ifdef TARGET_MEM_FUNCTIONS
4547 HOST_WIDE_INT startb, endb;
4548 #endif
4549 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4551 bitlength_rtx = expand_expr (bitlength,
4552 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4554 /* handle non-range tuple element like [ expr ] */
4555 if (startbit == NULL_TREE)
4557 startbit = save_expr (endbit);
4558 endbit = startbit;
4560 startbit = convert (sizetype, startbit);
4561 endbit = convert (sizetype, endbit);
4562 if (! integer_zerop (domain_min))
4564 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4565 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4567 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4568 EXPAND_CONST_ADDRESS);
4569 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4570 EXPAND_CONST_ADDRESS);
4572 if (REG_P (target))
4574 targetx = assign_stack_temp (GET_MODE (target),
4575 GET_MODE_SIZE (GET_MODE (target)),
4577 emit_move_insn (targetx, target);
4579 else if (GET_CODE (target) == MEM)
4580 targetx = target;
4581 else
4582 abort ();
4584 #ifdef TARGET_MEM_FUNCTIONS
4585 /* Optimization: If startbit and endbit are
4586 constants divisible by BITS_PER_UNIT,
4587 call memset instead. */
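 /* For instance, a constant range covering bits 8 through 23 gives
    startb == 8 and endb == 24, both multiples of BITS_PER_UNIT, so
    the two bytes at offset 1 of TARGETX are simply set to all ones
    with a single memset call.  */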
4588 if (TREE_CODE (startbit) == INTEGER_CST
4589 && TREE_CODE (endbit) == INTEGER_CST
4590 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4591 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4593 emit_library_call (memset_libfunc, 0,
4594 VOIDmode, 3,
4595 plus_constant (XEXP (targetx, 0),
4596 startb / BITS_PER_UNIT),
4597 Pmode,
4598 constm1_rtx, TYPE_MODE (integer_type_node),
4599 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4600 TYPE_MODE (sizetype));
4602 else
4603 #endif
4605 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4606 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4607 bitlength_rtx, TYPE_MODE (sizetype),
4608 startbit_rtx, TYPE_MODE (sizetype),
4609 endbit_rtx, TYPE_MODE (sizetype));
4611 if (REG_P (target))
4612 emit_move_insn (target, targetx);
4616 else
4617 abort ();
4620 /* Store the value of EXP (an expression tree)
4621 into a subfield of TARGET which has mode MODE and occupies
4622 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4623 If MODE is VOIDmode, it means that we are storing into a bit-field.
4625 If VALUE_MODE is VOIDmode, return nothing in particular.
4626 UNSIGNEDP is not used in this case.
4628 Otherwise, return an rtx for the value stored. This rtx
4629 has mode VALUE_MODE if that is convenient to do.
4630 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4632 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4633 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4635 ALIAS_SET is the alias set for the destination. This value will
4636 (in general) be different from that for TARGET, since TARGET is a
4637 reference to the containing structure. */
4639 static rtx
4640 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4641 unsignedp, align, total_size, alias_set)
4642 rtx target;
4643 int bitsize, bitpos;
4644 enum machine_mode mode;
4645 tree exp;
4646 enum machine_mode value_mode;
4647 int unsignedp;
4648 int align;
4649 int total_size;
4650 int alias_set;
4652 HOST_WIDE_INT width_mask = 0;
4654 if (TREE_CODE (exp) == ERROR_MARK)
4655 return const0_rtx;
4657 if (bitsize < HOST_BITS_PER_WIDE_INT)
4658 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4660 /* If we are storing into an unaligned field of an aligned union that is
4661 in a register, we may have the mode of TARGET being an integer mode but
4662 MODE == BLKmode. In that case, get an aligned object whose size and
4663 alignment are the same as TARGET and store TARGET into it (we can avoid
4664 the store if the field being stored is the entire width of TARGET). Then
4665 call ourselves recursively to store the field into a BLKmode version of
4666 that object. Finally, load from the object into TARGET. This is not
4667 very efficient in general, but should only be slightly more expensive
4668 than the otherwise-required unaligned accesses. Perhaps this can be
4669 cleaned up later. */
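 /* For instance, if a union whose mode is an integer mode lives in a
    register and one of its members is a three-byte structure (a
    BLKmode field), the code below copies the register into OBJECT on
    the stack, stores the member through the BLKmode view BLK_OBJECT,
    and then reloads the updated word back into the register.  */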
4671 if (mode == BLKmode
4672 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4674 rtx object = assign_stack_temp (GET_MODE (target),
4675 GET_MODE_SIZE (GET_MODE (target)), 0);
4676 rtx blk_object = copy_rtx (object);
4678 MEM_SET_IN_STRUCT_P (object, 1);
4679 MEM_SET_IN_STRUCT_P (blk_object, 1);
4680 PUT_MODE (blk_object, BLKmode);
4682 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4683 emit_move_insn (object, target);
4685 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4686 align, total_size, alias_set);
4688 /* Even though we aren't returning target, we need to
4689 give it the updated value. */
4690 emit_move_insn (target, object);
4692 return blk_object;
4695 /* If the structure is in a register or if the component
4696 is a bit field, we cannot use addressing to access it.
4697 Use bit-field techniques or SUBREG to store in it. */
4699 if (mode == VOIDmode
4700 || (mode != BLKmode && ! direct_store[(int) mode]
4701 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4702 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4703 || GET_CODE (target) == REG
4704 || GET_CODE (target) == SUBREG
4705 /* If the field isn't aligned enough to store as an ordinary memref,
4706 store it as a bit field. */
4707 || (SLOW_UNALIGNED_ACCESS
4708 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4709 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4711 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4713 /* If BITSIZE is narrower than the size of the type of EXP
4714 we will be narrowing TEMP. Normally, what's wanted are the
4715 low-order bits. However, if EXP's type is a record and this is
4716 a big-endian machine, we want the upper BITSIZE bits. */
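 /* For instance, with a 32-bit TEMP and BITSIZE == 8, the shift below
    moves TEMP right by 32 - 8 == 24 bits, so the wanted upper byte of
    the record value ends up in the low-order bits that
    store_bit_field expects.  */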
4717 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4718 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4719 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4720 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4721 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4722 - bitsize),
4723 temp, 1);
4725 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4726 MODE. */
4727 if (mode != VOIDmode && mode != BLKmode
4728 && mode != TYPE_MODE (TREE_TYPE (exp)))
4729 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4731 /* If the modes of TARGET and TEMP are both BLKmode, both
4732 must be in memory and BITPOS must be aligned on a byte
4733 boundary. If so, we simply do a block copy. */
4734 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4736 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4737 || bitpos % BITS_PER_UNIT != 0)
4738 abort ();
4740 target = change_address (target, VOIDmode,
4741 plus_constant (XEXP (target, 0),
4742 bitpos / BITS_PER_UNIT));
4744 emit_block_move (target, temp,
4745 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4746 / BITS_PER_UNIT),
4749 return value_mode == VOIDmode ? const0_rtx : target;
4752 /* Store the value in the bitfield. */
4753 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4754 if (value_mode != VOIDmode)
4756 /* The caller wants an rtx for the value. */
4757 /* If possible, avoid refetching from the bitfield itself. */
4758 if (width_mask != 0
4759 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4761 tree count;
4762 enum machine_mode tmode;
4764 if (unsignedp)
4765 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4766 tmode = GET_MODE (temp);
4767 if (tmode == VOIDmode)
4768 tmode = value_mode;
4769 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4770 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4771 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4773 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4774 NULL_RTX, value_mode, 0, align,
4775 total_size);
4777 return const0_rtx;
4779 else
4781 rtx addr = XEXP (target, 0);
4782 rtx to_rtx;
4784 /* If a value is wanted, it must be the lhs;
4785 so make the address stable for multiple use. */
4787 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4788 && ! CONSTANT_ADDRESS_P (addr)
4789 /* A frame-pointer reference is already stable. */
4790 && ! (GET_CODE (addr) == PLUS
4791 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4792 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4793 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4794 addr = copy_to_reg (addr);
4796 /* Now build a reference to just the desired component. */
4798 to_rtx = copy_rtx (change_address (target, mode,
4799 plus_constant (addr,
4800 (bitpos
4801 / BITS_PER_UNIT))));
4802 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4803 MEM_ALIAS_SET (to_rtx) = alias_set;
4805 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4809 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4810 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4811 ARRAY_REFs and find the ultimate containing object, which we return.
4813 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4814 bit position, and *PUNSIGNEDP to the signedness of the field.
4815 If the position of the field is variable, we store a tree
4816 giving the variable offset (in units) in *POFFSET.
4817 This offset is in addition to the bit position.
4818 If the position is not variable, we store 0 in *POFFSET.
4819 We set *PALIGNMENT to the alignment in bytes of the address that will be
4820 computed. This is the alignment of the thing we return if *POFFSET
4821 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4823 If any of the extraction expressions is volatile,
4824 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4826 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4827 is a mode that can be used to access the field. In that case, *PBITSIZE
4828 is redundant.
4830 If the field describes a variable-sized object, *PMODE is set to
4831 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4832 this case, but the address of the object can be found. */
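 /* For instance, for a reference written as A.B[I].C (names purely
    illustrative), the loop below peels the COMPONENT_REFs and the
    ARRAY_REF: the constant parts of the field and element positions
    accumulate in *PBITPOS, the I-dependent part of the element offset
    ends up (in units) in *POFFSET, and the object returned is A.  */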
4834 tree
4835 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4836 punsignedp, pvolatilep, palignment)
4837 tree exp;
4838 int *pbitsize;
4839 int *pbitpos;
4840 tree *poffset;
4841 enum machine_mode *pmode;
4842 int *punsignedp;
4843 int *pvolatilep;
4844 int *palignment;
4846 tree orig_exp = exp;
4847 tree size_tree = 0;
4848 enum machine_mode mode = VOIDmode;
4849 tree offset = integer_zero_node;
4850 unsigned int alignment = BIGGEST_ALIGNMENT;
4852 if (TREE_CODE (exp) == COMPONENT_REF)
4854 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4855 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4856 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4857 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4859 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4861 size_tree = TREE_OPERAND (exp, 1);
4862 *punsignedp = TREE_UNSIGNED (exp);
4864 else
4866 mode = TYPE_MODE (TREE_TYPE (exp));
4867 if (mode == BLKmode)
4868 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4870 *pbitsize = GET_MODE_BITSIZE (mode);
4871 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4874 if (size_tree)
4876 if (TREE_CODE (size_tree) != INTEGER_CST)
4877 mode = BLKmode, *pbitsize = -1;
4878 else
4879 *pbitsize = TREE_INT_CST_LOW (size_tree);
4882 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4883 and find the ultimate containing object. */
4885 *pbitpos = 0;
4887 while (1)
4889 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4891 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4892 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4893 : TREE_OPERAND (exp, 2));
4894 tree constant = integer_zero_node, var = pos;
4896 /* If this field hasn't been filled in yet, don't go
4897 past it. This should only happen when folding expressions
4898 made during type construction. */
4899 if (pos == 0)
4900 break;
4902 /* Assume here that the offset is a multiple of a unit.
4903 If not, there should be an explicitly added constant. */
4904 if (TREE_CODE (pos) == PLUS_EXPR
4905 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4906 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4907 else if (TREE_CODE (pos) == INTEGER_CST)
4908 constant = pos, var = integer_zero_node;
4910 *pbitpos += TREE_INT_CST_LOW (constant);
4911 offset = size_binop (PLUS_EXPR, offset,
4912 size_binop (EXACT_DIV_EXPR, var,
4913 size_int (BITS_PER_UNIT)));
4916 else if (TREE_CODE (exp) == ARRAY_REF)
4918 /* This code is based on the code in case ARRAY_REF in expand_expr
4919 below. We assume here that the size of an array element is
4920 always an integral multiple of BITS_PER_UNIT. */
4922 tree index = TREE_OPERAND (exp, 1);
4923 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4924 tree low_bound
4925 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4926 tree index_type = TREE_TYPE (index);
4927 tree xindex;
4929 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4931 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4932 index);
4933 index_type = TREE_TYPE (index);
4936 /* Optimize the special-case of a zero lower bound.
4938 We convert the low_bound to sizetype to avoid some problems
4939 with constant folding. (E.g. suppose the lower bound is 1,
4940 and its mode is QI. Without the conversion, (ARRAY
4941 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4942 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4944 But sizetype isn't quite right either (especially if
4945 the lowbound is negative). FIXME */
4947 if (! integer_zerop (low_bound))
4948 index = fold (build (MINUS_EXPR, index_type, index,
4949 convert (sizetype, low_bound)));
4951 if (TREE_CODE (index) == INTEGER_CST)
4953 index = convert (sbitsizetype, index);
4954 index_type = TREE_TYPE (index);
4957 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4958 convert (sbitsizetype,
4959 TYPE_SIZE (TREE_TYPE (exp)))));
4961 if (TREE_CODE (xindex) == INTEGER_CST
4962 && TREE_INT_CST_HIGH (xindex) == 0)
4963 *pbitpos += TREE_INT_CST_LOW (xindex);
4964 else
4966 /* Either the bit offset calculated above is not constant, or
4967 it overflowed. In either case, redo the multiplication
4968 against the size in units. This is especially important
4969 in the non-constant case to avoid a division at runtime. */
4970 xindex = fold (build (MULT_EXPR, ssizetype, index,
4971 convert (ssizetype,
4972 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4974 if (contains_placeholder_p (xindex))
4975 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4977 offset = size_binop (PLUS_EXPR, offset, xindex);
4980 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4981 && ! ((TREE_CODE (exp) == NOP_EXPR
4982 || TREE_CODE (exp) == CONVERT_EXPR)
4983 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4984 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4985 != UNION_TYPE))
4986 && (TYPE_MODE (TREE_TYPE (exp))
4987 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4988 break;
4990 /* If any reference in the chain is volatile, the effect is volatile. */
4991 if (TREE_THIS_VOLATILE (exp))
4992 *pvolatilep = 1;
4994 /* If the offset is non-constant already, then we can't assume any
4995 alignment more than the alignment here. */
4996 if (! integer_zerop (offset))
4997 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4999 exp = TREE_OPERAND (exp, 0);
5002 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5003 alignment = MIN (alignment, DECL_ALIGN (exp));
5004 else if (TREE_TYPE (exp) != 0)
5005 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5007 if (integer_zerop (offset))
5008 offset = 0;
5010 if (offset != 0 && contains_placeholder_p (offset))
5011 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5013 *pmode = mode;
5014 *poffset = offset;
5015 *palignment = alignment / BITS_PER_UNIT;
5016 return exp;
5019 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5020 static enum memory_use_mode
5021 get_memory_usage_from_modifier (modifier)
5022 enum expand_modifier modifier;
5024 switch (modifier)
5026 case EXPAND_NORMAL:
5027 case EXPAND_SUM:
5028 return MEMORY_USE_RO;
5029 break;
5030 case EXPAND_MEMORY_USE_WO:
5031 return MEMORY_USE_WO;
5032 break;
5033 case EXPAND_MEMORY_USE_RW:
5034 return MEMORY_USE_RW;
5035 break;
5036 case EXPAND_MEMORY_USE_DONT:
5037 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5038 MEMORY_USE_DONT, because they are modifiers to a call of
5039 expand_expr in the ADDR_EXPR case of expand_expr. */
5040 case EXPAND_CONST_ADDRESS:
5041 case EXPAND_INITIALIZER:
5042 return MEMORY_USE_DONT;
5043 case EXPAND_MEMORY_USE_BAD:
5044 default:
5045 abort ();
5049 /* Given an rtx VALUE that may contain additions and multiplications,
5050 return an equivalent value that just refers to a register or memory.
5051 This is done by generating instructions to perform the arithmetic
5052 and returning a pseudo-register containing the value.
5054 The returned value may be a REG, SUBREG, MEM or constant. */
5057 force_operand (value, target)
5058 rtx value, target;
5060 register optab binoptab = 0;
5061 /* Use a temporary to force order of execution of calls to
5062 `force_operand'. */
5063 rtx tmp;
5064 register rtx op2;
5065 /* Use subtarget as the target for operand 0 of a binary operation. */
5066 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5068 /* Check for a PIC address load. */
5069 if (flag_pic
5070 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5071 && XEXP (value, 0) == pic_offset_table_rtx
5072 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5073 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5074 || GET_CODE (XEXP (value, 1)) == CONST))
5076 if (!subtarget)
5077 subtarget = gen_reg_rtx (GET_MODE (value));
5078 emit_move_insn (subtarget, value);
5079 return subtarget;
5082 if (GET_CODE (value) == PLUS)
5083 binoptab = add_optab;
5084 else if (GET_CODE (value) == MINUS)
5085 binoptab = sub_optab;
5086 else if (GET_CODE (value) == MULT)
5088 op2 = XEXP (value, 1);
5089 if (!CONSTANT_P (op2)
5090 && !(GET_CODE (op2) == REG && op2 != subtarget))
5091 subtarget = 0;
5092 tmp = force_operand (XEXP (value, 0), subtarget);
5093 return expand_mult (GET_MODE (value), tmp,
5094 force_operand (op2, NULL_RTX),
5095 target, 0);
5098 if (binoptab)
5100 op2 = XEXP (value, 1);
5101 if (!CONSTANT_P (op2)
5102 && !(GET_CODE (op2) == REG && op2 != subtarget))
5103 subtarget = 0;
5104 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5106 binoptab = add_optab;
5107 op2 = negate_rtx (GET_MODE (value), op2);
5110 /* Check for an addition with OP2 a constant integer and our first
5111 operand a PLUS of a virtual register and something else. In that
5112 case, we want to emit the sum of the virtual register and the
5113 constant first and then add the other value. This allows virtual
5114 register instantiation to simply modify the constant rather than
5115 creating another one around this addition. */
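 /* For instance, for (plus (plus (reg virtual-stack-vars) (reg X))
    (const_int 8)) the sum of the virtual register and 8 is emitted
    first and (reg X) is added afterwards, so instantiating the
    virtual register only has to adjust the constant already there.  */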
5116 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5117 && GET_CODE (XEXP (value, 0)) == PLUS
5118 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5119 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5120 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5122 rtx temp = expand_binop (GET_MODE (value), binoptab,
5123 XEXP (XEXP (value, 0), 0), op2,
5124 subtarget, 0, OPTAB_LIB_WIDEN);
5125 return expand_binop (GET_MODE (value), binoptab, temp,
5126 force_operand (XEXP (XEXP (value, 0), 1), 0),
5127 target, 0, OPTAB_LIB_WIDEN);
5130 tmp = force_operand (XEXP (value, 0), subtarget);
5131 return expand_binop (GET_MODE (value), binoptab, tmp,
5132 force_operand (op2, NULL_RTX),
5133 target, 0, OPTAB_LIB_WIDEN);
5134 /* We give UNSIGNEDP = 0 to expand_binop
5135 because the only operations we are expanding here are signed ones. */
5137 return value;
5140 /* Subroutine of expand_expr:
5141 save the non-copied parts (LIST) of an expr (LHS), and return a list
5142 which can restore these values to their previous values,
5143 should something modify their storage. */
5145 static tree
5146 save_noncopied_parts (lhs, list)
5147 tree lhs;
5148 tree list;
5150 tree tail;
5151 tree parts = 0;
5153 for (tail = list; tail; tail = TREE_CHAIN (tail))
5154 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5155 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5156 else
5158 tree part = TREE_VALUE (tail);
5159 tree part_type = TREE_TYPE (part);
5160 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5161 rtx target = assign_temp (part_type, 0, 1, 1);
5162 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5163 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5164 parts = tree_cons (to_be_saved,
5165 build (RTL_EXPR, part_type, NULL_TREE,
5166 (tree) target),
5167 parts);
5168 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5170 return parts;
5173 /* Subroutine of expand_expr:
5174 record the non-copied parts (LIST) of an expr (LHS), and return a list
5175 which specifies the initial values of these parts. */
5177 static tree
5178 init_noncopied_parts (lhs, list)
5179 tree lhs;
5180 tree list;
5182 tree tail;
5183 tree parts = 0;
5185 for (tail = list; tail; tail = TREE_CHAIN (tail))
5186 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5187 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5188 else if (TREE_PURPOSE (tail))
5190 tree part = TREE_VALUE (tail);
5191 tree part_type = TREE_TYPE (part);
5192 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5193 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5195 return parts;
5198 /* Subroutine of expand_expr: return nonzero iff there is no way that
5199 EXP can reference X, which is being modified. TOP_P is nonzero if this
5200 call is going to be used to determine whether we need a temporary
5201 for EXP, as opposed to a recursive call to this function.
5203 It is always safe for this routine to return zero since it merely
5204 searches for optimization opportunities. */
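 /* For instance, if variable A lives in a pseudo register that is
    about to be modified, an expression that mentions A (say A + 1) is
    reported unsafe, while one built only from other registers and
    constants may be expanded directly into that pseudo.  */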
5206 static int
5207 safe_from_p (x, exp, top_p)
5208 rtx x;
5209 tree exp;
5210 int top_p;
5212 rtx exp_rtl = 0;
5213 int i, nops;
5214 static int save_expr_count;
5215 static int save_expr_size = 0;
5216 static tree *save_expr_rewritten;
5217 static tree save_expr_trees[256];
5219 if (x == 0
5220 /* If EXP has varying size, we MUST use a target since we currently
5221 have no way of allocating temporaries of variable size
5222 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5223 So we assume here that something at a higher level has prevented a
5224 clash. This is somewhat bogus, but the best we can do. Only
5225 do this when X is BLKmode and when we are at the top level. */
5226 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5227 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5228 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5229 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5230 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5231 != INTEGER_CST)
5232 && GET_MODE (x) == BLKmode))
5233 return 1;
5235 if (top_p && save_expr_size == 0)
5237 int rtn;
5239 save_expr_count = 0;
5240 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5241 save_expr_rewritten = &save_expr_trees[0];
5243 rtn = safe_from_p (x, exp, 1);
5245 for (i = 0; i < save_expr_count; ++i)
5247 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5248 abort ();
5249 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5252 save_expr_size = 0;
5254 return rtn;
5257 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5258 find the underlying pseudo. */
5259 if (GET_CODE (x) == SUBREG)
5261 x = SUBREG_REG (x);
5262 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5263 return 0;
5266 /* If X is a location in the outgoing argument area, it is always safe. */
5267 if (GET_CODE (x) == MEM
5268 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5269 || (GET_CODE (XEXP (x, 0)) == PLUS
5270 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5271 return 1;
5273 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5275 case 'd':
5276 exp_rtl = DECL_RTL (exp);
5277 break;
5279 case 'c':
5280 return 1;
5282 case 'x':
5283 if (TREE_CODE (exp) == TREE_LIST)
5284 return ((TREE_VALUE (exp) == 0
5285 || safe_from_p (x, TREE_VALUE (exp), 0))
5286 && (TREE_CHAIN (exp) == 0
5287 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5288 else if (TREE_CODE (exp) == ERROR_MARK)
5289 return 1; /* An already-visited SAVE_EXPR? */
5290 else
5291 return 0;
5293 case '1':
5294 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5296 case '2':
5297 case '<':
5298 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5299 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5301 case 'e':
5302 case 'r':
5303 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5304 the expression. If it is set, we conflict iff we are that rtx or
5305 both are in memory. Otherwise, we check all operands of the
5306 expression recursively. */
5308 switch (TREE_CODE (exp))
5310 case ADDR_EXPR:
5311 return (staticp (TREE_OPERAND (exp, 0))
5312 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5313 || TREE_STATIC (exp));
5315 case INDIRECT_REF:
5316 if (GET_CODE (x) == MEM)
5317 return 0;
5318 break;
5320 case CALL_EXPR:
5321 exp_rtl = CALL_EXPR_RTL (exp);
5322 if (exp_rtl == 0)
5324 /* Assume that the call will clobber all hard registers and
5325 all of memory. */
5326 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5327 || GET_CODE (x) == MEM)
5328 return 0;
5331 break;
5333 case RTL_EXPR:
5334 /* If a sequence exists, we would have to scan every instruction
5335 in the sequence to see if it was safe. This is probably not
5336 worthwhile. */
5337 if (RTL_EXPR_SEQUENCE (exp))
5338 return 0;
5340 exp_rtl = RTL_EXPR_RTL (exp);
5341 break;
5343 case WITH_CLEANUP_EXPR:
5344 exp_rtl = RTL_EXPR_RTL (exp);
5345 break;
5347 case CLEANUP_POINT_EXPR:
5348 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5350 case SAVE_EXPR:
5351 exp_rtl = SAVE_EXPR_RTL (exp);
5352 if (exp_rtl)
5353 break;
5355 /* This SAVE_EXPR might appear many times in the top-level
5356 safe_from_p() expression, and if it has a complex
5357 subexpression, examining it multiple times could result
5358 in a combinatorial explosion. E.g. on an Alpha
5359 running at least 200MHz, a Fortran test case compiled with
5360 optimization took about 28 minutes to compile -- even though
5361 it was only a few lines long, and the complicated line causing
5362 so much time to be spent in the earlier version of safe_from_p()
5363 had only 293 or so unique nodes.
5365 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5366 where it is so we can turn it back in the top-level safe_from_p()
5367 when we're done. */
5369 /* For now, don't bother re-sizing the array. */
5370 if (save_expr_count >= save_expr_size)
5371 return 0;
5372 save_expr_rewritten[save_expr_count++] = exp;
5374 nops = tree_code_length[(int) SAVE_EXPR];
5375 for (i = 0; i < nops; i++)
5377 tree operand = TREE_OPERAND (exp, i);
5378 if (operand == NULL_TREE)
5379 continue;
5380 TREE_SET_CODE (exp, ERROR_MARK);
5381 if (!safe_from_p (x, operand, 0))
5382 return 0;
5383 TREE_SET_CODE (exp, SAVE_EXPR);
5385 TREE_SET_CODE (exp, ERROR_MARK);
5386 return 1;
5388 case BIND_EXPR:
5389 /* The only operand we look at is operand 1. The rest aren't
5390 part of the expression. */
5391 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5393 case METHOD_CALL_EXPR:
5394 /* This takes a rtx argument, but shouldn't appear here. */
5395 abort ();
5397 default:
5398 break;
5401 /* If we have an rtx, we do not need to scan our operands. */
5402 if (exp_rtl)
5403 break;
5405 nops = tree_code_length[(int) TREE_CODE (exp)];
5406 for (i = 0; i < nops; i++)
5407 if (TREE_OPERAND (exp, i) != 0
5408 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5409 return 0;
5412 /* If we have an rtl, find any enclosed object. Then see if we conflict
5413 with it. */
5414 if (exp_rtl)
5416 if (GET_CODE (exp_rtl) == SUBREG)
5418 exp_rtl = SUBREG_REG (exp_rtl);
5419 if (GET_CODE (exp_rtl) == REG
5420 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5421 return 0;
5424 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5425 are memory and EXP is not readonly. */
5426 return ! (rtx_equal_p (x, exp_rtl)
5427 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5428 && ! TREE_READONLY (exp)));
5431 /* If we reach here, it is safe. */
5432 return 1;
5435 /* Subroutine of expand_expr: return nonzero iff EXP is an
5436 expression whose type is statically determinable. */
5438 static int
5439 fixed_type_p (exp)
5440 tree exp;
5442 if (TREE_CODE (exp) == PARM_DECL
5443 || TREE_CODE (exp) == VAR_DECL
5444 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5445 || TREE_CODE (exp) == COMPONENT_REF
5446 || TREE_CODE (exp) == ARRAY_REF)
5447 return 1;
5448 return 0;
5451 /* Subroutine of expand_expr: return rtx if EXP is a
5452 variable or parameter; else return 0. */
5454 static rtx
5455 var_rtx (exp)
5456 tree exp;
5458 STRIP_NOPS (exp);
5459 switch (TREE_CODE (exp))
5461 case PARM_DECL:
5462 case VAR_DECL:
5463 return DECL_RTL (exp);
5464 default:
5465 return 0;
5469 #ifdef MAX_INTEGER_COMPUTATION_MODE
5470 void
5471 check_max_integer_computation_mode (exp)
5472 tree exp;
5474 enum tree_code code;
5475 enum machine_mode mode;
5477 /* Strip any NOPs that don't change the mode. */
5478 STRIP_NOPS (exp);
5479 code = TREE_CODE (exp);
5481 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5482 if (code == NOP_EXPR
5483 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5484 return;
5486 /* First check the type of the overall operation. We need only look at
5487 unary, binary and relational operations. */
5488 if (TREE_CODE_CLASS (code) == '1'
5489 || TREE_CODE_CLASS (code) == '2'
5490 || TREE_CODE_CLASS (code) == '<')
5492 mode = TYPE_MODE (TREE_TYPE (exp));
5493 if (GET_MODE_CLASS (mode) == MODE_INT
5494 && mode > MAX_INTEGER_COMPUTATION_MODE)
5495 fatal ("unsupported wide integer operation");
5498 /* Check operand of a unary op. */
5499 if (TREE_CODE_CLASS (code) == '1')
5501 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5502 if (GET_MODE_CLASS (mode) == MODE_INT
5503 && mode > MAX_INTEGER_COMPUTATION_MODE)
5504 fatal ("unsupported wide integer operation");
5507 /* Check operands of a binary/comparison op. */
5508 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5510 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5511 if (GET_MODE_CLASS (mode) == MODE_INT
5512 && mode > MAX_INTEGER_COMPUTATION_MODE)
5513 fatal ("unsupported wide integer operation");
5515 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5516 if (GET_MODE_CLASS (mode) == MODE_INT
5517 && mode > MAX_INTEGER_COMPUTATION_MODE)
5518 fatal ("unsupported wide integer operation");
5521 #endif
5524 /* expand_expr: generate code for computing expression EXP.
5525 An rtx for the computed value is returned. The value is never null.
5526 In the case of a void EXP, const0_rtx is returned.
5528 The value may be stored in TARGET if TARGET is nonzero.
5529 TARGET is just a suggestion; callers must assume that
5530 the rtx returned may not be the same as TARGET.
5532 If TARGET is CONST0_RTX, it means that the value will be ignored.
5534 If TMODE is not VOIDmode, it suggests generating the
5535 result in mode TMODE. But this is done only when convenient.
5536 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5537 TMODE is just a suggestion; callers must assume that
5538 the rtx returned may not have mode TMODE.
5540 Note that TARGET may have neither TMODE nor MODE. In that case, it
5541 probably will not be used.
5543 If MODIFIER is EXPAND_SUM then when EXP is an addition
5544 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5545 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5546 products as above, or REG or MEM, or constant.
5547 Ordinarily in such cases we would output mul or add instructions
5548 and then return a pseudo reg containing the sum.
5550 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5551 it also marks a label as absolutely required (it can't be dead).
5552 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5553 This is used for outputting expressions used in initializers.
5555 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5556 with a constant address even if that address is not normally legitimate.
5557 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
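 /* For instance, when an address such as &A[I] is expanded with
    EXPAND_SUM for use inside a larger address computation, the result
    may come back as a nest like (plus (mult (reg ...) (const_int 4))
    (symbol_ref A)) rather than being forced into a single pseudo, so
    the caller can fold it into an addressing mode.  */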
5560 expand_expr (exp, target, tmode, modifier)
5561 register tree exp;
5562 rtx target;
5563 enum machine_mode tmode;
5564 enum expand_modifier modifier;
5566 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5567 This is static so it will be accessible to our recursive callees. */
5568 static tree placeholder_list = 0;
5569 register rtx op0, op1, temp;
5570 tree type = TREE_TYPE (exp);
5571 int unsignedp = TREE_UNSIGNED (type);
5572 register enum machine_mode mode;
5573 register enum tree_code code = TREE_CODE (exp);
5574 optab this_optab;
5575 rtx subtarget, original_target;
5576 int ignore;
5577 tree context;
5578 /* Used by check-memory-usage to make modifier read only. */
5579 enum expand_modifier ro_modifier;
5581 /* Handle ERROR_MARK before anybody tries to access its type. */
5582 if (TREE_CODE (exp) == ERROR_MARK)
5584 op0 = CONST0_RTX (tmode);
5585 if (op0 != 0)
5586 return op0;
5587 return const0_rtx;
5590 mode = TYPE_MODE (type);
5591 /* Use subtarget as the target for operand 0 of a binary operation. */
5592 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5593 original_target = target;
5594 ignore = (target == const0_rtx
5595 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5596 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5597 || code == COND_EXPR)
5598 && TREE_CODE (type) == VOID_TYPE));
5600 /* Make a read-only version of the modifier. */
5601 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5602 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5603 ro_modifier = modifier;
5604 else
5605 ro_modifier = EXPAND_NORMAL;
5607 /* Don't use hard regs as subtargets, because the combiner
5608 can only handle pseudo regs. */
5609 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5610 subtarget = 0;
5611 /* Avoid subtargets inside loops,
5612 since they hide some invariant expressions. */
5613 if (preserve_subexpressions_p ())
5614 subtarget = 0;
5616 /* If we are going to ignore this result, we need only do something
5617 if there is a side-effect somewhere in the expression. If there
5618 is, short-circuit the most common cases here. Note that we must
5619 not call expand_expr with anything but const0_rtx in case this
5620 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
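/* For instance, a C expression statement such as `a + b;' whose value is
   never used arrives here with IGNORE set, and only the side effects (none,
   in that example) need to be expanded.  (Illustrative note, not part of
   the original sources.)  */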
5622 if (ignore)
5624 if (! TREE_SIDE_EFFECTS (exp))
5625 return const0_rtx;
5627 /* Ensure we reference a volatile object even if value is ignored. */
5628 if (TREE_THIS_VOLATILE (exp)
5629 && TREE_CODE (exp) != FUNCTION_DECL
5630 && mode != VOIDmode && mode != BLKmode)
5632 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5633 if (GET_CODE (temp) == MEM)
5634 temp = copy_to_reg (temp);
5635 return const0_rtx;
5638 if (TREE_CODE_CLASS (code) == '1')
5639 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5640 VOIDmode, ro_modifier);
5641 else if (TREE_CODE_CLASS (code) == '2'
5642 || TREE_CODE_CLASS (code) == '<')
5644 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5645 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5646 return const0_rtx;
5648 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5649 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5650 /* If the second operand has no side effects, just evaluate
5651 the first. */
5652 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5653 VOIDmode, ro_modifier);
5655 target = 0;
5658 #ifdef MAX_INTEGER_COMPUTATION_MODE
5659 /* Only check stuff here if the mode we want is different from the mode
5660 of the expression; if it's the same, check_max_integer_computation_mode
5661 will handle it. Do we really need to check this stuff at all? */
5663 if (target
5664 && GET_MODE (target) != mode
5665 && TREE_CODE (exp) != INTEGER_CST
5666 && TREE_CODE (exp) != PARM_DECL
5667 && TREE_CODE (exp) != ARRAY_REF
5668 && TREE_CODE (exp) != COMPONENT_REF
5669 && TREE_CODE (exp) != BIT_FIELD_REF
5670 && TREE_CODE (exp) != INDIRECT_REF
5671 && TREE_CODE (exp) != CALL_EXPR
5672 && TREE_CODE (exp) != VAR_DECL
5673 && TREE_CODE (exp) != RTL_EXPR)
5675 enum machine_mode mode = GET_MODE (target);
5677 if (GET_MODE_CLASS (mode) == MODE_INT
5678 && mode > MAX_INTEGER_COMPUTATION_MODE)
5679 fatal ("unsupported wide integer operation");
5682 if (tmode != mode
5683 && TREE_CODE (exp) != INTEGER_CST
5684 && TREE_CODE (exp) != PARM_DECL
5685 && TREE_CODE (exp) != ARRAY_REF
5686 && TREE_CODE (exp) != COMPONENT_REF
5687 && TREE_CODE (exp) != BIT_FIELD_REF
5688 && TREE_CODE (exp) != INDIRECT_REF
5689 && TREE_CODE (exp) != VAR_DECL
5690 && TREE_CODE (exp) != CALL_EXPR
5691 && TREE_CODE (exp) != RTL_EXPR
5692 && GET_MODE_CLASS (tmode) == MODE_INT
5693 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5694 fatal ("unsupported wide integer operation");
5696 check_max_integer_computation_mode (exp);
5697 #endif
5699 /* If we will do cse, generate all results into pseudo registers
5700 since 1) that allows cse to find more things
5701 and 2) otherwise cse could produce an insn the machine
5702 cannot support. */
5704 if (! cse_not_expected && mode != BLKmode && target
5705 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5706 target = subtarget;
5708 switch (code)
5710 case LABEL_DECL:
5712 tree function = decl_function_context (exp);
5713 /* Handle using a label in a containing function. */
5714 if (function != current_function_decl
5715 && function != inline_function_decl && function != 0)
5717 struct function *p = find_function_data (function);
5718 /* Allocate in the memory associated with the function
5719 that the label is in. */
5720 push_obstacks (p->function_obstack,
5721 p->function_maybepermanent_obstack);
5723 p->expr->x_forced_labels
5724 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5725 p->expr->x_forced_labels);
5726 pop_obstacks ();
5728 else
5730 if (modifier == EXPAND_INITIALIZER)
5731 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5732 label_rtx (exp),
5733 forced_labels);
5736 temp = gen_rtx_MEM (FUNCTION_MODE,
5737 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5738 if (function != current_function_decl
5739 && function != inline_function_decl && function != 0)
5740 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5741 return temp;
5744 case PARM_DECL:
5745 if (DECL_RTL (exp) == 0)
5747 error_with_decl (exp, "prior parameter's size depends on `%s'");
5748 return CONST0_RTX (mode);
5751 /* ... fall through ... */
5753 case VAR_DECL:
5754 /* If a static var's type was incomplete when the decl was written,
5755 but the type is complete now, lay out the decl now. */
5756 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5757 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5759 push_obstacks_nochange ();
5760 end_temporary_allocation ();
5761 layout_decl (exp, 0);
5762 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5763 pop_obstacks ();
5766 /* Although static-storage variables start off initialized, according to
5767 ANSI C, a memcpy could overwrite them with uninitialized values. So
5768 we check them too. This also lets us check for read-only variables
5769 accessed via a non-const declaration, in case it won't be detected
5770 any other way (e.g., in an embedded system or OS kernel without
5771 memory protection).
5773 Aggregates are not checked here; they're handled elsewhere. */
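/* The library call emitted below is roughly equivalent to
   chkr_check_addr (&decl, sizeof (decl), access_mode), where access_mode is
   derived from MODIFIER; this is the instrumentation used by
   -fcheck-memory-usage.  (Illustrative note, not part of the original
   sources.)  */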
5774 if (current_function && current_function_check_memory_usage
5775 && code == VAR_DECL
5776 && GET_CODE (DECL_RTL (exp)) == MEM
5777 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5779 enum memory_use_mode memory_usage;
5780 memory_usage = get_memory_usage_from_modifier (modifier);
5782 if (memory_usage != MEMORY_USE_DONT)
5783 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5784 XEXP (DECL_RTL (exp), 0), Pmode,
5785 GEN_INT (int_size_in_bytes (type)),
5786 TYPE_MODE (sizetype),
5787 GEN_INT (memory_usage),
5788 TYPE_MODE (integer_type_node));
5791 /* ... fall through ... */
5793 case FUNCTION_DECL:
5794 case RESULT_DECL:
5795 if (DECL_RTL (exp) == 0)
5796 abort ();
5798 /* Ensure the variable is marked as used even if it doesn't go through
5799 a parser. If it hasn't been used yet, write out an external
5800 definition. */
5801 if (! TREE_USED (exp))
5803 assemble_external (exp);
5804 TREE_USED (exp) = 1;
5807 /* Show we haven't gotten RTL for this yet. */
5808 temp = 0;
5810 /* Handle variables inherited from containing functions. */
5811 context = decl_function_context (exp);
5813 /* We treat inline_function_decl as an alias for the current function
5814 because that is the inline function whose vars, types, etc.
5815 are being merged into the current function.
5816 See expand_inline_function. */
5818 if (context != 0 && context != current_function_decl
5819 && context != inline_function_decl
5820 /* If var is static, we don't need a static chain to access it. */
5821 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5822 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5824 rtx addr;
5826 /* Mark as non-local and addressable. */
5827 DECL_NONLOCAL (exp) = 1;
5828 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5829 abort ();
5830 mark_addressable (exp);
5831 if (GET_CODE (DECL_RTL (exp)) != MEM)
5832 abort ();
5833 addr = XEXP (DECL_RTL (exp), 0);
5834 if (GET_CODE (addr) == MEM)
5835 addr = gen_rtx_MEM (Pmode,
5836 fix_lexical_addr (XEXP (addr, 0), exp));
5837 else
5838 addr = fix_lexical_addr (addr, exp);
5839 temp = change_address (DECL_RTL (exp), mode, addr);
5842 /* This is the case of an array whose size is to be determined
5843 from its initializer, while the initializer is still being parsed.
5844 See expand_decl. */
5846 else if (GET_CODE (DECL_RTL (exp)) == MEM
5847 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5848 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5849 XEXP (DECL_RTL (exp), 0));
5851 /* If DECL_RTL is memory, we are in the normal case and either
5852 the address is not valid or it is not a register and -fforce-addr
5853 is specified, get the address into a register. */
5855 else if (GET_CODE (DECL_RTL (exp)) == MEM
5856 && modifier != EXPAND_CONST_ADDRESS
5857 && modifier != EXPAND_SUM
5858 && modifier != EXPAND_INITIALIZER
5859 && (! memory_address_p (DECL_MODE (exp),
5860 XEXP (DECL_RTL (exp), 0))
5861 || (flag_force_addr
5862 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5863 temp = change_address (DECL_RTL (exp), VOIDmode,
5864 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5866 /* If we got something, return it. But first, set the alignment
5867 if the address is a register. */
5868 if (temp != 0)
5870 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5871 mark_reg_pointer (XEXP (temp, 0),
5872 DECL_ALIGN (exp) / BITS_PER_UNIT);
5874 return temp;
5877 /* If the mode of DECL_RTL does not match that of the decl, it
5878 must be a promoted value. We return a SUBREG of the wanted mode,
5879 but mark it so that we know that it was already extended. */
5881 if (GET_CODE (DECL_RTL (exp)) == REG
5882 && GET_MODE (DECL_RTL (exp)) != mode)
5884 /* Get the signedness used for this variable. Ensure we get the
5885 same mode we got when the variable was declared. */
5886 if (GET_MODE (DECL_RTL (exp))
5887 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5888 abort ();
5890 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5891 SUBREG_PROMOTED_VAR_P (temp) = 1;
5892 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5893 return temp;
5896 return DECL_RTL (exp);
5898 case INTEGER_CST:
5899 return immed_double_const (TREE_INT_CST_LOW (exp),
5900 TREE_INT_CST_HIGH (exp),
5901 mode);
5903 case CONST_DECL:
5904 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5905 EXPAND_MEMORY_USE_BAD);
5907 case REAL_CST:
5908 /* If optimized, generate immediate CONST_DOUBLE
5909 which will be turned into memory by reload if necessary.
5911 We used to force a register so that loop.c could see it. But
5912 this does not allow gen_* patterns to perform optimizations with
5913 the constants. It also produces two insns in cases like "x = 1.0;".
5914 On most machines, floating-point constants are not permitted in
5915 many insns, so we'd end up copying it to a register in any case.
5917 Now, we do the copying in expand_binop, if appropriate. */
5918 return immed_real_const (exp);
5920 case COMPLEX_CST:
5921 case STRING_CST:
5922 if (! TREE_CST_RTL (exp))
5923 output_constant_def (exp);
5925 /* TREE_CST_RTL probably contains a constant address.
5926 On RISC machines where a constant address isn't valid,
5927 make some insns to get that address into a register. */
5928 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5929 && modifier != EXPAND_CONST_ADDRESS
5930 && modifier != EXPAND_INITIALIZER
5931 && modifier != EXPAND_SUM
5932 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5933 || (flag_force_addr
5934 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5935 return change_address (TREE_CST_RTL (exp), VOIDmode,
5936 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5937 return TREE_CST_RTL (exp);
5939 case EXPR_WITH_FILE_LOCATION:
5941 rtx to_return;
5942 char *saved_input_filename = input_filename;
5943 int saved_lineno = lineno;
5944 input_filename = EXPR_WFL_FILENAME (exp);
5945 lineno = EXPR_WFL_LINENO (exp);
5946 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5947 emit_line_note (input_filename, lineno);
5948 /* Possibly avoid switching back and forth here. */
5949 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5950 input_filename = saved_input_filename;
5951 lineno = saved_lineno;
5952 return to_return;
5955 case SAVE_EXPR:
5956 context = decl_function_context (exp);
5958 /* If this SAVE_EXPR was at global context, assume we are an
5959 initialization function and move it into our context. */
5960 if (context == 0)
5961 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5963 /* We treat inline_function_decl as an alias for the current function
5964 because that is the inline function whose vars, types, etc.
5965 are being merged into the current function.
5966 See expand_inline_function. */
5967 if (context == current_function_decl || context == inline_function_decl)
5968 context = 0;
5970 /* If this is non-local, handle it. */
5971 if (context)
5973 /* The following call just exists to abort if the context is
5974 not of a containing function. */
5975 find_function_data (context);
5977 temp = SAVE_EXPR_RTL (exp);
5978 if (temp && GET_CODE (temp) == REG)
5980 put_var_into_stack (exp);
5981 temp = SAVE_EXPR_RTL (exp);
5983 if (temp == 0 || GET_CODE (temp) != MEM)
5984 abort ();
5985 return change_address (temp, mode,
5986 fix_lexical_addr (XEXP (temp, 0), exp));
5988 if (SAVE_EXPR_RTL (exp) == 0)
5990 if (mode == VOIDmode)
5991 temp = const0_rtx;
5992 else
5993 temp = assign_temp (type, 3, 0, 0);
5995 SAVE_EXPR_RTL (exp) = temp;
5996 if (!optimize && GET_CODE (temp) == REG)
5997 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5998 save_expr_regs);
6000 /* If the mode of TEMP does not match that of the expression, it
6001 must be a promoted value. We pass store_expr a SUBREG of the
6002 wanted mode but mark it so that we know that it was already
6003 extended. Note that `unsignedp' was modified above in
6004 this case. */
6006 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6008 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6009 SUBREG_PROMOTED_VAR_P (temp) = 1;
6010 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6013 if (temp == const0_rtx)
6014 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6015 EXPAND_MEMORY_USE_BAD);
6016 else
6017 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6019 TREE_USED (exp) = 1;
6022 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6023 must be a promoted value. We return a SUBREG of the wanted mode,
6024 but mark it so that we know that it was already extended. */
6026 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6027 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6029 /* Compute the signedness and make the proper SUBREG. */
6030 promote_mode (type, mode, &unsignedp, 0);
6031 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6032 SUBREG_PROMOTED_VAR_P (temp) = 1;
6033 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6034 return temp;
6037 return SAVE_EXPR_RTL (exp);
6039 case UNSAVE_EXPR:
6041 rtx temp;
6042 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6043 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6044 return temp;
6047 case PLACEHOLDER_EXPR:
6049 tree placeholder_expr;
6051 /* If there is an object on the head of the placeholder list,
6052 see if some object in it is of type TYPE or a pointer to it. For
6053 further information, see tree.def. */
6054 for (placeholder_expr = placeholder_list;
6055 placeholder_expr != 0;
6056 placeholder_expr = TREE_CHAIN (placeholder_expr))
6058 tree need_type = TYPE_MAIN_VARIANT (type);
6059 tree object = 0;
6060 tree old_list = placeholder_list;
6061 tree elt;
6063 /* Find the outermost reference that is of the type we want.
6064 If none, see if any object has a type that is a pointer to
6065 the type we want. */
6066 for (elt = TREE_PURPOSE (placeholder_expr);
6067 elt != 0 && object == 0;
6068 elt
6069 = ((TREE_CODE (elt) == COMPOUND_EXPR
6070 || TREE_CODE (elt) == COND_EXPR)
6071 ? TREE_OPERAND (elt, 1)
6072 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6073 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6074 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6075 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6076 ? TREE_OPERAND (elt, 0) : 0))
6077 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6078 object = elt;
6080 for (elt = TREE_PURPOSE (placeholder_expr);
6081 elt != 0 && object == 0;
6082 elt
6083 = ((TREE_CODE (elt) == COMPOUND_EXPR
6084 || TREE_CODE (elt) == COND_EXPR)
6085 ? TREE_OPERAND (elt, 1)
6086 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6087 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6088 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6089 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6090 ? TREE_OPERAND (elt, 0) : 0))
6091 if (POINTER_TYPE_P (TREE_TYPE (elt))
6092 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6093 == need_type))
6094 object = build1 (INDIRECT_REF, need_type, elt);
6096 if (object != 0)
6098 /* Expand this object skipping the list entries before
6099 it was found in case it is also a PLACEHOLDER_EXPR.
6100 In that case, we want to translate it using subsequent
6101 entries. */
6102 placeholder_list = TREE_CHAIN (placeholder_expr);
6103 temp = expand_expr (object, original_target, tmode,
6104 ro_modifier);
6105 placeholder_list = old_list;
6106 return temp;
6111 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6112 abort ();
6114 case WITH_RECORD_EXPR:
6115 /* Put the object on the placeholder list, expand our first operand,
6116 and pop the list. */
6117 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6118 placeholder_list);
6119 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6120 tmode, ro_modifier);
6121 placeholder_list = TREE_CHAIN (placeholder_list);
6122 return target;
6124 case GOTO_EXPR:
6125 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6126 expand_goto (TREE_OPERAND (exp, 0));
6127 else
6128 expand_computed_goto (TREE_OPERAND (exp, 0));
6129 return const0_rtx;
6131 case EXIT_EXPR:
6132 expand_exit_loop_if_false (NULL_PTR,
6133 invert_truthvalue (TREE_OPERAND (exp, 0)));
6134 return const0_rtx;
6136 case LABELED_BLOCK_EXPR:
6137 if (LABELED_BLOCK_BODY (exp))
6138 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6139 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6140 return const0_rtx;
6142 case EXIT_BLOCK_EXPR:
6143 if (EXIT_BLOCK_RETURN (exp))
6144 sorry ("returned value in block_exit_expr");
6145 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6146 return const0_rtx;
6148 case LOOP_EXPR:
6149 push_temp_slots ();
6150 expand_start_loop (1);
6151 expand_expr_stmt (TREE_OPERAND (exp, 0));
6152 expand_end_loop ();
6153 pop_temp_slots ();
6155 return const0_rtx;
6157 case BIND_EXPR:
6159 tree vars = TREE_OPERAND (exp, 0);
6160 int vars_need_expansion = 0;
6162 /* Need to open a binding contour here because
6163 if there are any cleanups they must be contained here. */
6164 expand_start_bindings (2);
6166 /* Mark the corresponding BLOCK for output in its proper place. */
6167 if (TREE_OPERAND (exp, 2) != 0
6168 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6169 insert_block (TREE_OPERAND (exp, 2));
6171 /* If VARS have not yet been expanded, expand them now. */
6172 while (vars)
6174 if (DECL_RTL (vars) == 0)
6176 vars_need_expansion = 1;
6177 expand_decl (vars);
6179 expand_decl_init (vars);
6180 vars = TREE_CHAIN (vars);
6183 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6185 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6187 return temp;
6190 case RTL_EXPR:
6191 if (RTL_EXPR_SEQUENCE (exp))
6193 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6194 abort ();
6195 emit_insns (RTL_EXPR_SEQUENCE (exp));
6196 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6198 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6199 free_temps_for_rtl_expr (exp);
6200 return RTL_EXPR_RTL (exp);
6202 case CONSTRUCTOR:
6203 /* If we don't need the result, just ensure we evaluate any
6204 subexpressions. */
6205 if (ignore)
6207 tree elt;
6208 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6209 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6210 EXPAND_MEMORY_USE_BAD);
6211 return const0_rtx;
6214 /* All elts simple constants => refer to a constant in memory. But
6215 if this is a non-BLKmode mode, let it store a field at a time
6216 since that should make a CONST_INT or CONST_DOUBLE when we
6217 fold. Likewise, if we have a target we can use, it is best to
6218 store directly into the target unless the type is large enough
6219 that memcpy will be used. If we are making an initializer and
6220 all operands are constant, put it in memory as well. */
6221 else if ((TREE_STATIC (exp)
6222 && ((mode == BLKmode
6223 && ! (target != 0 && safe_from_p (target, exp, 1)))
6224 || TREE_ADDRESSABLE (exp)
6225 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6226 && (!MOVE_BY_PIECES_P
6227 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6228 TYPE_ALIGN (type) / BITS_PER_UNIT))
6229 && ! mostly_zeros_p (exp))))
6230 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6232 rtx constructor = output_constant_def (exp);
6233 if (modifier != EXPAND_CONST_ADDRESS
6234 && modifier != EXPAND_INITIALIZER
6235 && modifier != EXPAND_SUM
6236 && (! memory_address_p (GET_MODE (constructor),
6237 XEXP (constructor, 0))
6238 || (flag_force_addr
6239 && GET_CODE (XEXP (constructor, 0)) != REG)))
6240 constructor = change_address (constructor, VOIDmode,
6241 XEXP (constructor, 0));
6242 return constructor;
6245 else
6247 /* Handle calls that pass values in multiple non-contiguous
6248 locations. The Irix 6 ABI has examples of this. */
6249 if (target == 0 || ! safe_from_p (target, exp, 1)
6250 || GET_CODE (target) == PARALLEL)
6252 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6253 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6254 else
6255 target = assign_temp (type, 0, 1, 1);
6258 if (TREE_READONLY (exp))
6260 if (GET_CODE (target) == MEM)
6261 target = copy_rtx (target);
6263 RTX_UNCHANGING_P (target) = 1;
6266 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0);
6267 return target;
6270 case INDIRECT_REF:
6272 tree exp1 = TREE_OPERAND (exp, 0);
6273 tree exp2;
6274 tree index;
6275 tree string = string_constant (exp1, &index);
6276 int i;
6278 /* Try to optimize reads from const strings. */
6279 if (string
6280 && TREE_CODE (string) == STRING_CST
6281 && TREE_CODE (index) == INTEGER_CST
6282 && !TREE_INT_CST_HIGH (index)
6283 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6284 && GET_MODE_CLASS (mode) == MODE_INT
6285 && GET_MODE_SIZE (mode) == 1
6286 && modifier != EXPAND_MEMORY_USE_WO)
6287 return GEN_INT (TREE_STRING_POINTER (string)[i]);
6289 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6290 op0 = memory_address (mode, op0);
6292 if (current_function && current_function_check_memory_usage
6293 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6295 enum memory_use_mode memory_usage;
6296 memory_usage = get_memory_usage_from_modifier (modifier);
6298 if (memory_usage != MEMORY_USE_DONT)
6300 in_check_memory_usage = 1;
6301 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6302 op0, Pmode,
6303 GEN_INT (int_size_in_bytes (type)),
6304 TYPE_MODE (sizetype),
6305 GEN_INT (memory_usage),
6306 TYPE_MODE (integer_type_node));
6307 in_check_memory_usage = 0;
6311 temp = gen_rtx_MEM (mode, op0);
6312 /* If address was computed by addition,
6313 mark this as an element of an aggregate. */
6314 if (TREE_CODE (exp1) == PLUS_EXPR
6315 || (TREE_CODE (exp1) == SAVE_EXPR
6316 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6317 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6318 || (TREE_CODE (exp1) == ADDR_EXPR
6319 && (exp2 = TREE_OPERAND (exp1, 0))
6320 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6321 MEM_SET_IN_STRUCT_P (temp, 1);
6323 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6324 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6326 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6327 here, because, in C and C++, the fact that a location is accessed
6328 through a pointer to const does not mean that the value there can
6329 never change. Languages where it can never change should
6330 also set TREE_STATIC. */
6331 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6332 return temp;
6335 case ARRAY_REF:
6336 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6337 abort ();
6340 tree array = TREE_OPERAND (exp, 0);
6341 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6342 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6343 tree index = TREE_OPERAND (exp, 1);
6344 tree index_type = TREE_TYPE (index);
6345 HOST_WIDE_INT i;
6347 /* Optimize the special-case of a zero lower bound.
6349 We convert the low_bound to sizetype to avoid some problems
6350 with constant folding. (E.g. suppose the lower bound is 1,
6351 and its mode is QI. Without the conversion, (ARRAY
6352 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6353 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6355 But sizetype isn't quite right either (especially if
6356 the lowbound is negative). FIXME */
6358 if (! integer_zerop (low_bound))
6359 index = fold (build (MINUS_EXPR, index_type, index,
6360 convert (sizetype, low_bound)));
6362 /* Fold an expression like: "foo"[2].
6363 This is not done in fold so it won't happen inside &.
6364 Don't fold if this is for wide characters since it's too
6365 difficult to do correctly and this is a very rare case. */
6367 if (TREE_CODE (array) == STRING_CST
6368 && TREE_CODE (index) == INTEGER_CST
6369 && !TREE_INT_CST_HIGH (index)
6370 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6371 && GET_MODE_CLASS (mode) == MODE_INT
6372 && GET_MODE_SIZE (mode) == 1)
6373 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6375 /* If this is a constant index into a constant array,
6376 just get the value from the array. Handle both the cases when
6377 we have an explicit constructor and when our operand is a variable
6378 that was declared const. */
6380 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6382 if (TREE_CODE (index) == INTEGER_CST
6383 && TREE_INT_CST_HIGH (index) == 0)
6385 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6387 i = TREE_INT_CST_LOW (index);
6388 while (elem && i--)
6389 elem = TREE_CHAIN (elem);
6390 if (elem)
6391 return expand_expr (fold (TREE_VALUE (elem)), target,
6392 tmode, ro_modifier);
6396 else if (optimize >= 1
6397 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6398 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6399 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6401 if (TREE_CODE (index) == INTEGER_CST)
6403 tree init = DECL_INITIAL (array);
6405 i = TREE_INT_CST_LOW (index);
6406 if (TREE_CODE (init) == CONSTRUCTOR)
6408 tree elem = CONSTRUCTOR_ELTS (init);
6410 while (elem
6411 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6412 elem = TREE_CHAIN (elem);
6413 if (elem)
6414 return expand_expr (fold (TREE_VALUE (elem)), target,
6415 tmode, ro_modifier);
6417 else if (TREE_CODE (init) == STRING_CST
6418 && TREE_INT_CST_HIGH (index) == 0
6419 && (TREE_INT_CST_LOW (index)
6420 < TREE_STRING_LENGTH (init)))
6421 return (GEN_INT
6422 (TREE_STRING_POINTER
6423 (init)[TREE_INT_CST_LOW (index)]));
6428 /* ... fall through ... */
6430 case COMPONENT_REF:
6431 case BIT_FIELD_REF:
6432 /* If the operand is a CONSTRUCTOR, we can just extract the
6433 appropriate field if it is present. Don't do this if we have
6434 already written the data since we want to refer to that copy
6435 and varasm.c assumes that's what we'll do. */
6436 if (code != ARRAY_REF
6437 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6438 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6440 tree elt;
6442 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6443 elt = TREE_CHAIN (elt))
6444 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6445 /* We can normally use the value of the field in the
6446 CONSTRUCTOR. However, if this is a bitfield in
6447 an integral mode that we can fit in a HOST_WIDE_INT,
6448 we must mask only the number of bits in the bitfield,
6449 since this is done implicitly by the constructor. If
6450 the bitfield does not meet either of those conditions,
6451 we can't do this optimization. */
6452 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6453 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6454 == MODE_INT)
6455 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6456 <= HOST_BITS_PER_WIDE_INT))))
6458 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6459 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6461 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6463 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6465 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6466 op0 = expand_and (op0, op1, target);
6468 else
6470 enum machine_mode imode
6471 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6472 tree count
6473 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6474 0);
6476 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6477 target, 0);
6478 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6479 target, 0);
6483 return op0;
6488 enum machine_mode mode1;
6489 int bitsize;
6490 int bitpos;
6491 tree offset;
6492 int volatilep = 0;
6493 int alignment;
6494 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6495 &mode1, &unsignedp, &volatilep,
6496 &alignment);
6498 /* If we got back the original object, something is wrong. Perhaps
6499 we are evaluating an expression too early. In any event, don't
6500 infinitely recurse. */
6501 if (tem == exp)
6502 abort ();
6504 /* If TEM's type is a union of variable size, pass TARGET to the inner
6505 computation, since it will need a temporary and TARGET is known
6506 to have to do. This occurs in unchecked conversion in Ada. */
6508 op0 = expand_expr (tem,
6509 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6510 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6511 != INTEGER_CST)
6512 ? target : NULL_RTX),
6513 VOIDmode,
6514 modifier == EXPAND_INITIALIZER
6515 ? modifier : EXPAND_NORMAL);
6517 /* If this is a constant, put it into a register if it is a
6518 legitimate constant and memory if it isn't. */
6519 if (CONSTANT_P (op0))
6521 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6522 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6523 op0 = force_reg (mode, op0);
6524 else
6525 op0 = validize_mem (force_const_mem (mode, op0));
6528 if (offset != 0)
6530 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6532 if (GET_CODE (op0) != MEM)
6533 abort ();
6535 if (GET_MODE (offset_rtx) != ptr_mode)
6537 #ifdef POINTERS_EXTEND_UNSIGNED
6538 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6539 #else
6540 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6541 #endif
6544 /* A constant address in OP0 can have VOIDmode; we must not try
6545 to call force_reg for that case. Avoid that case. */
6546 if (GET_CODE (op0) == MEM
6547 && GET_MODE (op0) == BLKmode
6548 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6549 && bitsize
6550 && (bitpos % bitsize) == 0
6551 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6552 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6554 rtx temp = change_address (op0, mode1,
6555 plus_constant (XEXP (op0, 0),
6556 (bitpos /
6557 BITS_PER_UNIT)));
6558 if (GET_CODE (XEXP (temp, 0)) == REG)
6559 op0 = temp;
6560 else
6561 op0 = change_address (op0, mode1,
6562 force_reg (GET_MODE (XEXP (temp, 0)),
6563 XEXP (temp, 0)));
6564 bitpos = 0;
6568 op0 = change_address (op0, VOIDmode,
6569 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6570 force_reg (ptr_mode,
6571 offset_rtx)));
6574 /* Don't forget about volatility even if this is a bitfield. */
6575 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6577 op0 = copy_rtx (op0);
6578 MEM_VOLATILE_P (op0) = 1;
6581 /* Check the access. */
6582 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6584 enum memory_use_mode memory_usage;
6585 memory_usage = get_memory_usage_from_modifier (modifier);
6587 if (memory_usage != MEMORY_USE_DONT)
6589 rtx to;
6590 int size;
6592 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6593 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6595 /* Check the access right of the pointer. */
6596 if (size > BITS_PER_UNIT)
6597 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6598 to, Pmode,
6599 GEN_INT (size / BITS_PER_UNIT),
6600 TYPE_MODE (sizetype),
6601 GEN_INT (memory_usage),
6602 TYPE_MODE (integer_type_node));
6606 /* In cases where an aligned union has an unaligned object
6607 as a field, we might be extracting a BLKmode value from
6608 an integer-mode (e.g., SImode) object. Handle this case
6609 by doing the extract into an object as wide as the field
6610 (which we know to be the width of a basic mode), then
6611 storing into memory, and changing the mode to BLKmode.
6612 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6613 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6614 if (mode1 == VOIDmode
6615 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6616 || (modifier != EXPAND_CONST_ADDRESS
6617 && modifier != EXPAND_INITIALIZER
6618 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6619 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6620 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6621 /* If the field isn't aligned enough to fetch as a memref,
6622 fetch it as a bit field. */
6623 || (SLOW_UNALIGNED_ACCESS
6624 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6625 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6627 enum machine_mode ext_mode = mode;
6629 if (ext_mode == BLKmode)
6630 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6632 if (ext_mode == BLKmode)
6634 /* In this case, BITPOS must start at a byte boundary and
6635 TARGET, if specified, must be a MEM. */
6636 if (GET_CODE (op0) != MEM
6637 || (target != 0 && GET_CODE (target) != MEM)
6638 || bitpos % BITS_PER_UNIT != 0)
6639 abort ();
6641 op0 = change_address (op0, VOIDmode,
6642 plus_constant (XEXP (op0, 0),
6643 bitpos / BITS_PER_UNIT));
6644 if (target == 0)
6645 target = assign_temp (type, 0, 1, 1);
6647 emit_block_move (target, op0,
6648 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6649 / BITS_PER_UNIT),
6652 return target;
6655 op0 = validize_mem (op0);
6657 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6658 mark_reg_pointer (XEXP (op0, 0), alignment);
6660 op0 = extract_bit_field (op0, bitsize, bitpos,
6661 unsignedp, target, ext_mode, ext_mode,
6662 alignment,
6663 int_size_in_bytes (TREE_TYPE (tem)));
6665 /* If the result is a record type and BITSIZE is narrower than
6666 the mode of OP0, an integral mode, and this is a big endian
6667 machine, we must put the field into the high-order bits. */
6668 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6669 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6670 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6671 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6672 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6673 - bitsize),
6674 op0, 1);
6676 if (mode == BLKmode)
6678 rtx new = assign_stack_temp (ext_mode,
6679 bitsize / BITS_PER_UNIT, 0);
6681 emit_move_insn (new, op0);
6682 op0 = copy_rtx (new);
6683 PUT_MODE (op0, BLKmode);
6684 MEM_SET_IN_STRUCT_P (op0, 1);
6687 return op0;
6690 /* If the result is BLKmode, use that to access the object
6691 now as well. */
6692 if (mode == BLKmode)
6693 mode1 = BLKmode;
6695 /* Get a reference to just this component. */
6696 if (modifier == EXPAND_CONST_ADDRESS
6697 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6698 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6699 (bitpos / BITS_PER_UNIT)));
6700 else
6701 op0 = change_address (op0, mode1,
6702 plus_constant (XEXP (op0, 0),
6703 (bitpos / BITS_PER_UNIT)));
6705 if (GET_CODE (op0) == MEM)
6706 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6708 if (GET_CODE (XEXP (op0, 0)) == REG)
6709 mark_reg_pointer (XEXP (op0, 0), alignment);
6711 MEM_SET_IN_STRUCT_P (op0, 1);
6712 MEM_VOLATILE_P (op0) |= volatilep;
6713 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6714 || modifier == EXPAND_CONST_ADDRESS
6715 || modifier == EXPAND_INITIALIZER)
6716 return op0;
6717 else if (target == 0)
6718 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6720 convert_move (target, op0, unsignedp);
6721 return target;
6724 /* Intended for a reference to a buffer of a file-object in Pascal.
6725 But it's not certain that a special tree code will really be
6726 necessary for these. INDIRECT_REF might work for them. */
6727 case BUFFER_REF:
6728 abort ();
6730 case IN_EXPR:
6732 /* Pascal set IN expression.
6734 Algorithm:
6735 rlo = set_low - (set_low%bits_per_word);
6736 the_word = set [ (index - rlo)/bits_per_word ];
6737 bit_index = index % bits_per_word;
6738 bitmask = 1 << bit_index;
6739 return !!(the_word & bitmask); */
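/* Illustrative trace of the pseudo-code above (not part of the original
   sources): with set_low = 3, index = 10 and bits_per_word = 8 we get
   rlo = 3 - (3 % 8) = 0, the_word = set[(10 - 0) / 8] = set[1],
   bit_index = 10 % 8 = 2 and bitmask = 1 << 2 = 4.  */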
6741 tree set = TREE_OPERAND (exp, 0);
6742 tree index = TREE_OPERAND (exp, 1);
6743 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6744 tree set_type = TREE_TYPE (set);
6745 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6746 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6747 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6748 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6749 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6750 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6751 rtx setaddr = XEXP (setval, 0);
6752 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6753 rtx rlow;
6754 rtx diff, quo, rem, addr, bit, result;
6756 preexpand_calls (exp);
6758 /* If domain is empty, answer is no. Likewise if index is constant
6759 and out of bounds. */
6760 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6761 && TREE_CODE (set_low_bound) == INTEGER_CST
6762 && tree_int_cst_lt (set_high_bound, set_low_bound))
6763 || (TREE_CODE (index) == INTEGER_CST
6764 && TREE_CODE (set_low_bound) == INTEGER_CST
6765 && tree_int_cst_lt (index, set_low_bound))
6766 || (TREE_CODE (set_high_bound) == INTEGER_CST
6767 && TREE_CODE (index) == INTEGER_CST
6768 && tree_int_cst_lt (set_high_bound, index))))
6769 return const0_rtx;
6771 if (target == 0)
6772 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6774 /* If we get here, we have to generate the code for both cases
6775 (in range and out of range). */
6777 op0 = gen_label_rtx ();
6778 op1 = gen_label_rtx ();
6780 if (! (GET_CODE (index_val) == CONST_INT
6781 && GET_CODE (lo_r) == CONST_INT))
6783 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6784 GET_MODE (index_val), iunsignedp, 0, op1);
6787 if (! (GET_CODE (index_val) == CONST_INT
6788 && GET_CODE (hi_r) == CONST_INT))
6790 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6791 GET_MODE (index_val), iunsignedp, 0, op1);
6794 /* Calculate the element number of bit zero in the first word
6795 of the set. */
6796 if (GET_CODE (lo_r) == CONST_INT)
6797 rlow = GEN_INT (INTVAL (lo_r)
6798 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6799 else
6800 rlow = expand_binop (index_mode, and_optab, lo_r,
6801 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6802 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6804 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6805 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6807 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6808 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6809 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6810 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6812 addr = memory_address (byte_mode,
6813 expand_binop (index_mode, add_optab, diff,
6814 setaddr, NULL_RTX, iunsignedp,
6815 OPTAB_LIB_WIDEN));
6817 /* Extract the bit we want to examine */
6818 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6819 gen_rtx_MEM (byte_mode, addr),
6820 make_tree (TREE_TYPE (index), rem),
6821 NULL_RTX, 1);
6822 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6823 GET_MODE (target) == byte_mode ? target : 0,
6824 1, OPTAB_LIB_WIDEN);
6826 if (result != target)
6827 convert_move (target, result, 1);
6829 /* Output the code to handle the out-of-range case. */
6830 emit_jump (op0);
6831 emit_label (op1);
6832 emit_move_insn (target, const0_rtx);
6833 emit_label (op0);
6834 return target;
6837 case WITH_CLEANUP_EXPR:
6838 if (RTL_EXPR_RTL (exp) == 0)
6840 RTL_EXPR_RTL (exp)
6841 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6842 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6844 /* That's it for this cleanup. */
6845 TREE_OPERAND (exp, 2) = 0;
6847 return RTL_EXPR_RTL (exp);
6849 case CLEANUP_POINT_EXPR:
6851 /* Start a new binding layer that will keep track of all cleanup
6852 actions to be performed. */
6853 expand_start_bindings (2);
6855 target_temp_slot_level = temp_slot_level;
6857 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6858 /* If we're going to use this value, load it up now. */
6859 if (! ignore)
6860 op0 = force_not_mem (op0);
6861 preserve_temp_slots (op0);
6862 expand_end_bindings (NULL_TREE, 0, 0);
6864 return op0;
6866 case CALL_EXPR:
6867 /* Check for a built-in function. */
6868 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6869 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6870 == FUNCTION_DECL)
6871 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6872 return expand_builtin (exp, target, subtarget, tmode, ignore);
6874 /* If this call was expanded already by preexpand_calls,
6875 just return the result we got. */
6876 if (CALL_EXPR_RTL (exp) != 0)
6877 return CALL_EXPR_RTL (exp);
6879 return expand_call (exp, target, ignore);
6881 case NON_LVALUE_EXPR:
6882 case NOP_EXPR:
6883 case CONVERT_EXPR:
6884 case REFERENCE_EXPR:
6885 if (TREE_CODE (type) == UNION_TYPE)
6887 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6888 if (target == 0)
6890 if (mode != BLKmode)
6891 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6892 else
6893 target = assign_temp (type, 0, 1, 1);
6896 if (GET_CODE (target) == MEM)
6897 /* Store data into beginning of memory target. */
6898 store_expr (TREE_OPERAND (exp, 0),
6899 change_address (target, TYPE_MODE (valtype), 0), 0);
6901 else if (GET_CODE (target) == REG)
6902 /* Store this field into a union of the proper type. */
6903 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6904 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6905 VOIDmode, 0, 1,
6906 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6908 else
6909 abort ();
6911 /* Return the entire union. */
6912 return target;
6915 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6917 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6918 ro_modifier);
6920 /* If the signedness of the conversion differs and OP0 is
6921 a promoted SUBREG, clear that indication since we now
6922 have to do the proper extension. */
6923 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6924 && GET_CODE (op0) == SUBREG)
6925 SUBREG_PROMOTED_VAR_P (op0) = 0;
6927 return op0;
6930 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6931 if (GET_MODE (op0) == mode)
6932 return op0;
6934 /* If OP0 is a constant, just convert it into the proper mode. */
6935 if (CONSTANT_P (op0))
6936 return
6937 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6938 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6940 if (modifier == EXPAND_INITIALIZER)
6941 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6943 if (target == 0)
6944 return
6945 convert_to_mode (mode, op0,
6946 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6947 else
6948 convert_move (target, op0,
6949 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6950 return target;
6952 case PLUS_EXPR:
6953 /* We come here from MINUS_EXPR when the second operand is a
6954 constant. */
6955 plus_expr:
6956 this_optab = add_optab;
6958 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6959 something else, make sure we add the register to the constant and
6960 then to the other thing. This case can occur during strength
6961 reduction and doing it this way will produce better code if the
6962 frame pointer or argument pointer is eliminated.
6964 fold-const.c will ensure that the constant is always in the inner
6965 PLUS_EXPR, so the only case we need to do anything about is if
6966 sp, ap, or fp is our second argument, in which case we must swap
6967 the innermost first argument and our second argument. */
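/* E.g. (A + C) + FP, where C is a constant and FP is the frame pointer
   RTL_EXPR, is rewritten below as (FP + C) + A.  (Illustrative note, not
   part of the original sources.)  */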
6969 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6970 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6971 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6972 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6973 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6974 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6976 tree t = TREE_OPERAND (exp, 1);
6978 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6979 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6982 /* If the result is to be ptr_mode and we are adding an integer to
6983 something, we might be forming a constant. So try to use
6984 plus_constant. If it produces a sum and we can't accept it,
6985 use force_operand. This allows P = &ARR[const] to generate
6986 efficient code on machines where a SYMBOL_REF is not a valid
6987 address.
6989 If this is an EXPAND_SUM call, always return the sum. */
6990 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6991 || mode == ptr_mode)
6993 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6994 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6995 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6997 rtx constant_part;
6999 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7000 EXPAND_SUM);
7001 /* Use immed_double_const to ensure that the constant is
7002 truncated according to the mode of OP1, then sign extended
7003 to a HOST_WIDE_INT. Using the constant directly can result
7004 in non-canonical RTL in a 64x32 cross compile. */
7005 constant_part
7006 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7007 (HOST_WIDE_INT) 0,
7008 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7009 op1 = plus_constant (op1, INTVAL (constant_part));
7010 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7011 op1 = force_operand (op1, target);
7012 return op1;
7015 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7016 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7017 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7019 rtx constant_part;
7021 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7022 EXPAND_SUM);
7023 if (! CONSTANT_P (op0))
7025 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7026 VOIDmode, modifier);
7027 /* Don't go to both_summands if modifier
7028 says it's not right to return a PLUS. */
7029 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7030 goto binop2;
7031 goto both_summands;
7033 /* Use immed_double_const to ensure that the constant is
7034 truncated according to the mode of OP1, then sign extended
7035 to a HOST_WIDE_INT. Using the constant directly can result
7036 in non-canonical RTL in a 64x32 cross compile. */
7037 constant_part
7038 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7039 (HOST_WIDE_INT) 0,
7040 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7041 op0 = plus_constant (op0, INTVAL (constant_part));
7042 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7043 op0 = force_operand (op0, target);
7044 return op0;
7048 /* No sense saving up arithmetic to be done
7049 if it's all in the wrong mode to form part of an address.
7050 And force_operand won't know whether to sign-extend or
7051 zero-extend. */
7052 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7053 || mode != ptr_mode)
7054 goto binop;
7056 preexpand_calls (exp);
7057 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7058 subtarget = 0;
7060 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7061 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7063 both_summands:
7064 /* Make sure any term that's a sum with a constant comes last. */
7065 if (GET_CODE (op0) == PLUS
7066 && CONSTANT_P (XEXP (op0, 1)))
7068 temp = op0;
7069 op0 = op1;
7070 op1 = temp;
7072 /* If adding to a sum including a constant,
7073 associate it to put the constant outside. */
7074 if (GET_CODE (op1) == PLUS
7075 && CONSTANT_P (XEXP (op1, 1)))
7077 rtx constant_term = const0_rtx;
7079 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7080 if (temp != 0)
7081 op0 = temp;
7082 /* Ensure that MULT comes first if there is one. */
7083 else if (GET_CODE (op0) == MULT)
7084 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7085 else
7086 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7088 /* Let's also eliminate constants from op0 if possible. */
7089 op0 = eliminate_constant_term (op0, &constant_term);
7091 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7092 their sum should be a constant. Form it into OP1, since the
7093 result we want will then be OP0 + OP1. */
7095 temp = simplify_binary_operation (PLUS, mode, constant_term,
7096 XEXP (op1, 1));
7097 if (temp != 0)
7098 op1 = temp;
7099 else
7100 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7103 /* Put a constant term last and put a multiplication first. */
7104 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7105 temp = op1, op1 = op0, op0 = temp;
7107 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7108 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
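/* Note that under EXPAND_SUM or EXPAND_INITIALIZER the code above may thus
   return a bare address-arithmetic form such as
   (plus (mult (reg) (const_int 4)) (const_int 8)) rather than emitting add
   and mult insns, as described in the comment before this function.
   (Illustrative note, not part of the original sources.)  */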
7110 case MINUS_EXPR:
7111 /* For initializers, we are allowed to return a MINUS of two
7112 symbolic constants. Here we handle all cases when both operands
7113 are constant. */
7114 /* Handle difference of two symbolic constants,
7115 for the sake of an initializer. */
7116 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7117 && really_constant_p (TREE_OPERAND (exp, 0))
7118 && really_constant_p (TREE_OPERAND (exp, 1)))
7120 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7121 VOIDmode, ro_modifier);
7122 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7123 VOIDmode, ro_modifier);
7125 /* If the last operand is a CONST_INT, use plus_constant of
7126 the negated constant. Else make the MINUS. */
7127 if (GET_CODE (op1) == CONST_INT)
7128 return plus_constant (op0, - INTVAL (op1));
7129 else
7130 return gen_rtx_MINUS (mode, op0, op1);
7132 /* Convert A - const to A + (-const). */
7133 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7135 tree negated = fold (build1 (NEGATE_EXPR, type,
7136 TREE_OPERAND (exp, 1)));
7138 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7139 /* If we can't negate the constant in TYPE, leave it alone and
7140 expand_binop will negate it for us. We used to try to do it
7141 here in the signed version of TYPE, but that doesn't work
7142 on POINTER_TYPEs. */;
7143 else
7145 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7146 goto plus_expr;
7149 this_optab = sub_optab;
7150 goto binop;
7152 case MULT_EXPR:
7153 preexpand_calls (exp);
7154 /* If first operand is constant, swap them.
7155 Thus the following special case checks need only
7156 check the second operand. */
7157 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7159 register tree t1 = TREE_OPERAND (exp, 0);
7160 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7161 TREE_OPERAND (exp, 1) = t1;
7164 /* Attempt to return something suitable for generating an
7165 indexed address, for machines that support that. */
7167 if (modifier == EXPAND_SUM && mode == ptr_mode
7168 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7169 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7171 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7172 EXPAND_SUM);
7174 /* Apply distributive law if OP0 is x+c. */
7175 if (GET_CODE (op0) == PLUS
7176 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7177 return
7178 gen_rtx_PLUS
7179 (mode,
7180 gen_rtx_MULT
7181 (mode, XEXP (op0, 0),
7182 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7183 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7184 * INTVAL (XEXP (op0, 1))));
7186 if (GET_CODE (op0) != REG)
7187 op0 = force_operand (op0, NULL_RTX);
7188 if (GET_CODE (op0) != REG)
7189 op0 = copy_to_mode_reg (mode, op0);
7191 return
7192 gen_rtx_MULT (mode, op0,
7193 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7196 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7197 subtarget = 0;
7199 /* Check for multiplying things that have been extended
7200 from a narrower type. If this machine supports multiplying
7201 in that narrower type with a result in the desired type,
7202 do it that way, and avoid the explicit type-conversion. */
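/* For example, (int) (short) a * (int) (short) b, on a target where short
   maps to HImode and int to SImode, may be done directly with a
   HImode-to-SImode widening multiply when the target supports one, avoiding
   the explicit widening of both operands.  (Illustrative note, not part of
   the original sources.)  */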
7203 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7204 && TREE_CODE (type) == INTEGER_TYPE
7205 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7206 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7207 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7208 && int_fits_type_p (TREE_OPERAND (exp, 1),
7209 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7210 /* Don't use a widening multiply if a shift will do. */
7211 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7212 > HOST_BITS_PER_WIDE_INT)
7213 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7214 ||
7215 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7216 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7217 ==
7218 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7219 /* If both operands are extended, they must either both
7220 be zero-extended or both be sign-extended. */
7221 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7222 ==
7223 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7225 enum machine_mode innermode
7226 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7227 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7228 ? smul_widen_optab : umul_widen_optab);
7229 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7230 ? umul_widen_optab : smul_widen_optab);
7231 if (mode == GET_MODE_WIDER_MODE (innermode))
7233 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7235 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7236 NULL_RTX, VOIDmode, 0);
7237 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7238 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7239 VOIDmode, 0);
7240 else
7241 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7242 NULL_RTX, VOIDmode, 0);
7243 goto binop2;
7245 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7246 && innermode == word_mode)
7248 rtx htem;
7249 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7250 NULL_RTX, VOIDmode, 0);
7251 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7252 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7253 VOIDmode, 0);
7254 else
7255 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7256 NULL_RTX, VOIDmode, 0);
7257 temp = expand_binop (mode, other_optab, op0, op1, target,
7258 unsignedp, OPTAB_LIB_WIDEN);
7259 htem = expand_mult_highpart_adjust (innermode,
7260 gen_highpart (innermode, temp),
7261 op0, op1,
7262 gen_highpart (innermode, temp),
7263 unsignedp);
7264 emit_move_insn (gen_highpart (innermode, temp), htem);
7265 return temp;
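/* Otherwise expand both operands and let expand_mult pick the best
multiplication sequence; for constant multipliers this may be a series
of shifts and adds rather than a multiply instruction.  */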
7269 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7270 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7271 return expand_mult (mode, op0, op1, target, unsignedp);
7273 case TRUNC_DIV_EXPR:
7274 case FLOOR_DIV_EXPR:
7275 case CEIL_DIV_EXPR:
7276 case ROUND_DIV_EXPR:
7277 case EXACT_DIV_EXPR:
7278 preexpand_calls (exp);
7279 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7280 subtarget = 0;
7281 /* Possible optimization: compute the dividend with EXPAND_SUM;
7282 then, if the divisor is constant, we can optimize the case
7283 where some terms of the dividend have coefficients divisible by it. */
7284 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7285 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7286 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7288 case RDIV_EXPR:
7289 this_optab = flodiv_optab;
7290 goto binop;
7292 case TRUNC_MOD_EXPR:
7293 case FLOOR_MOD_EXPR:
7294 case CEIL_MOD_EXPR:
7295 case ROUND_MOD_EXPR:
7296 preexpand_calls (exp);
7297 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7298 subtarget = 0;
7299 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7300 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7301 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7303 case FIX_ROUND_EXPR:
7304 case FIX_FLOOR_EXPR:
7305 case FIX_CEIL_EXPR:
7306 abort (); /* Not used for C. */
7308 case FIX_TRUNC_EXPR:
7309 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7310 if (target == 0)
7311 target = gen_reg_rtx (mode);
7312 expand_fix (target, op0, unsignedp);
7313 return target;
7315 case FLOAT_EXPR:
7316 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7317 if (target == 0)
7318 target = gen_reg_rtx (mode);
7319 /* expand_float can't figure out what to do if FROM has VOIDmode.
7320 So give it the correct mode. With -O, cse will optimize this. */
7321 if (GET_MODE (op0) == VOIDmode)
7322 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7323 op0);
7324 expand_float (target, op0,
7325 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7326 return target;
7328 case NEGATE_EXPR:
7329 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7330 temp = expand_unop (mode, neg_optab, op0, target, 0);
7331 if (temp == 0)
7332 abort ();
7333 return temp;
7335 case ABS_EXPR:
7336 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7338 /* Handle complex values specially. */
7339 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7340 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7341 return expand_complex_abs (mode, op0, target, unsignedp);
7343 /* Unsigned abs is simply the operand. Testing here means we don't
7344 risk generating incorrect code below. */
7345 if (TREE_UNSIGNED (type))
7346 return op0;
7348 return expand_abs (mode, op0, target,
7349 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7351 case MAX_EXPR:
7352 case MIN_EXPR:
7353 target = original_target;
7354 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7355 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7356 || GET_MODE (target) != mode
7357 || (GET_CODE (target) == REG
7358 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7359 target = gen_reg_rtx (mode);
7360 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7361 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7363 /* First try to do it with a special MIN or MAX instruction.
7364 If that does not win, use a conditional jump to select the proper
7365 value. */
7366 this_optab = (TREE_UNSIGNED (type)
7367 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7368 : (code == MIN_EXPR ? smin_optab : smax_optab));
7370 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7371 OPTAB_WIDEN);
7372 if (temp != 0)
7373 return temp;
7375 /* At this point, a MEM target is no longer useful; we will get better
7376 code without it. */
7378 if (GET_CODE (target) == MEM)
7379 target = gen_reg_rtx (mode);
7381 if (target != op0)
7382 emit_move_insn (target, op0);
7384 op0 = gen_label_rtx ();
7386 /* If this mode is an integer too wide to compare properly,
7387 compare word by word. Rely on cse to optimize constant cases. */
7388 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode, ccp_jump))
7390 if (code == MAX_EXPR)
7391 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7392 target, op1, NULL_RTX, op0);
7393 else
7394 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7395 op1, target, NULL_RTX, op0);
7397 else
7399 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7400 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7401 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7402 op0);
7404 emit_move_insn (target, op1);
7405 emit_label (op0);
7406 return target;
7408 case BIT_NOT_EXPR:
7409 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7410 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7411 if (temp == 0)
7412 abort ();
7413 return temp;
7415 case FFS_EXPR:
7416 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7417 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7418 if (temp == 0)
7419 abort ();
7420 return temp;
7422 /* ??? Can optimize bitwise operations with one arg constant.
7423 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7424 and (a bitwise1 b) bitwise2 b (etc)
7425 but that is probably not worth while. */
7427 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7428 boolean values when we want in all cases to compute both of them. In
7429 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7430 as actual zero-or-1 values and then bitwise anding. In cases where
7431 there cannot be any side effects, better code would be made by
7432 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7433 how to recognize those cases. */
7435 case TRUTH_AND_EXPR:
7436 case BIT_AND_EXPR:
7437 this_optab = and_optab;
7438 goto binop;
7440 case TRUTH_OR_EXPR:
7441 case BIT_IOR_EXPR:
7442 this_optab = ior_optab;
7443 goto binop;
7445 case TRUTH_XOR_EXPR:
7446 case BIT_XOR_EXPR:
7447 this_optab = xor_optab;
7448 goto binop;
7450 case LSHIFT_EXPR:
7451 case RSHIFT_EXPR:
7452 case LROTATE_EXPR:
7453 case RROTATE_EXPR:
7454 preexpand_calls (exp);
7455 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7456 subtarget = 0;
7457 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7458 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7459 unsignedp);
7461 /* Could determine the answer when only additive constants differ. Also,
7462 the addition of one can be handled by changing the condition. */
7463 case LT_EXPR:
7464 case LE_EXPR:
7465 case GT_EXPR:
7466 case GE_EXPR:
7467 case EQ_EXPR:
7468 case NE_EXPR:
7469 preexpand_calls (exp);
7470 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7471 if (temp != 0)
7472 return temp;
7474 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7475 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7476 && original_target
7477 && GET_CODE (original_target) == REG
7478 && (GET_MODE (original_target)
7479 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7481 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7482 VOIDmode, 0);
7484 if (temp != original_target)
7485 temp = copy_to_reg (temp);
7487 op1 = gen_label_rtx ();
7488 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7489 GET_MODE (temp), unsignedp, 0, op1);
7490 emit_move_insn (temp, const1_rtx);
7491 emit_label (op1);
7492 return temp;
7495 /* If no set-flag instruction, must generate a conditional
7496 store into a temporary variable. Drop through
7497 and handle this like && and ||. */
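/* When the value is wanted, the sequence emitted below is essentially:
target = 0; if (! EXP) goto L; target = 1; L:  */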
7499 case TRUTH_ANDIF_EXPR:
7500 case TRUTH_ORIF_EXPR:
7501 if (! ignore
7502 && (target == 0 || ! safe_from_p (target, exp, 1)
7503 /* Make sure we don't have a hard reg (such as function's return
7504 value) live across basic blocks, if not optimizing. */
7505 || (!optimize && GET_CODE (target) == REG
7506 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7507 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7509 if (target)
7510 emit_clr_insn (target);
7512 op1 = gen_label_rtx ();
7513 jumpifnot (exp, op1);
7515 if (target)
7516 emit_0_to_1_insn (target);
7518 emit_label (op1);
7519 return ignore ? const0_rtx : target;
7521 case TRUTH_NOT_EXPR:
7522 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7523 /* The parser is careful to generate TRUTH_NOT_EXPR
7524 only with operands that are always zero or one. */
7525 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7526 target, 1, OPTAB_LIB_WIDEN);
7527 if (temp == 0)
7528 abort ();
7529 return temp;
7531 case COMPOUND_EXPR:
7532 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7533 emit_queue ();
7534 return expand_expr (TREE_OPERAND (exp, 1),
7535 (ignore ? const0_rtx : target),
7536 VOIDmode, 0);
7538 case COND_EXPR:
7539 /* If we would have a "singleton" (see below) were it not for a
7540 conversion in each arm, bring that conversion back out. */
7541 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7542 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7543 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7544 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7546 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7547 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7549 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7550 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7551 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7552 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7553 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7554 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7555 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7556 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7557 return expand_expr (build1 (NOP_EXPR, type,
7558 build (COND_EXPR, TREE_TYPE (true),
7559 TREE_OPERAND (exp, 0),
7560 true, false)),
7561 target, tmode, modifier);
7565 /* Note that COND_EXPRs whose type is a structure or union
7566 are required to be constructed to contain assignments of
7567 a temporary variable, so that we can evaluate them here
7568 for side effect only. If type is void, we must do likewise. */
7570 /* If an arm of the branch requires a cleanup,
7571 only that cleanup is performed. */
7573 tree singleton = 0;
7574 tree binary_op = 0, unary_op = 0;
7576 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7577 convert it to our mode, if necessary. */
7578 if (integer_onep (TREE_OPERAND (exp, 1))
7579 && integer_zerop (TREE_OPERAND (exp, 2))
7580 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7582 if (ignore)
7584 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7585 ro_modifier);
7586 return const0_rtx;
7589 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7590 if (GET_MODE (op0) == mode)
7591 return op0;
7593 if (target == 0)
7594 target = gen_reg_rtx (mode);
7595 convert_move (target, op0, unsignedp);
7596 return target;
7599 /* Check for X ? A + B : A. If we have this, we can copy A to the
7600 output and conditionally add B. Similarly for unary operations.
7601 Don't do this if X has side-effects because those side effects
7602 might affect A or B and the "?" operation is a sequence point in
7603 ANSI. (operand_equal_p tests for side effects.) */
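/* For instance, in (x ? a + b : a), A is the "singleton" and the
PLUS_EXPR is the "binary_op": we can load A into the target and then
add B only when X is true.  */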
7605 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7606 && operand_equal_p (TREE_OPERAND (exp, 2),
7607 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7608 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7609 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7610 && operand_equal_p (TREE_OPERAND (exp, 1),
7611 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7612 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7613 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7614 && operand_equal_p (TREE_OPERAND (exp, 2),
7615 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7616 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7617 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7618 && operand_equal_p (TREE_OPERAND (exp, 1),
7619 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7620 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7622 /* If we are not to produce a result, we have no target. Otherwise,
7623 if a target was specified use it; it will not be used as an
7624 intermediate target unless it is safe. If no target, use a
7625 temporary. */
7627 if (ignore)
7628 temp = 0;
7629 else if (original_target
7630 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7631 || (singleton && GET_CODE (original_target) == REG
7632 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7633 && original_target == var_rtx (singleton)))
7634 && GET_MODE (original_target) == mode
7635 #ifdef HAVE_conditional_move
7636 && (! can_conditionally_move_p (mode)
7637 || GET_CODE (original_target) == REG
7638 || TREE_ADDRESSABLE (type))
7639 #endif
7640 && ! (GET_CODE (original_target) == MEM
7641 && MEM_VOLATILE_P (original_target)))
7642 temp = original_target;
7643 else if (TREE_ADDRESSABLE (type))
7644 abort ();
7645 else
7646 temp = assign_temp (type, 0, 0, 1);
7648 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7649 do the test of X as a store-flag operation, do this as
7650 A + ((X != 0) << log C). Similarly for other simple binary
7651 operators. Only do for C == 1 if BRANCH_COST is low. */
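/* For instance, (x != 0 ? a + 4 : a) can become a + ((x != 0) << 2),
replacing the branch with a store-flag and a shift.  */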
7652 if (temp && singleton && binary_op
7653 && (TREE_CODE (binary_op) == PLUS_EXPR
7654 || TREE_CODE (binary_op) == MINUS_EXPR
7655 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7656 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7657 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7658 : integer_onep (TREE_OPERAND (binary_op, 1)))
7659 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7661 rtx result;
7662 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7663 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7664 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7665 : xor_optab);
7667 /* If we had X ? A : A + 1, do this as A + (X == 0).
7669 We have to invert the truth value here and then put it
7670 back later if do_store_flag fails. We cannot simply copy
7671 TREE_OPERAND (exp, 0) to another variable and modify that
7672 because invert_truthvalue can modify the tree pointed to
7673 by its argument. */
7674 if (singleton == TREE_OPERAND (exp, 1))
7675 TREE_OPERAND (exp, 0)
7676 = invert_truthvalue (TREE_OPERAND (exp, 0));
7678 result = do_store_flag (TREE_OPERAND (exp, 0),
7679 (safe_from_p (temp, singleton, 1)
7680 ? temp : NULL_RTX),
7681 mode, BRANCH_COST <= 1);
7683 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7684 result = expand_shift (LSHIFT_EXPR, mode, result,
7685 build_int_2 (tree_log2
7686 (TREE_OPERAND
7687 (binary_op, 1)),
7688 0),
7689 (safe_from_p (temp, singleton, 1)
7690 ? temp : NULL_RTX), 0);
7692 if (result)
7694 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7695 return expand_binop (mode, boptab, op1, result, temp,
7696 unsignedp, OPTAB_LIB_WIDEN);
7698 else if (singleton == TREE_OPERAND (exp, 1))
7699 TREE_OPERAND (exp, 0)
7700 = invert_truthvalue (TREE_OPERAND (exp, 0));
7703 do_pending_stack_adjust ();
7704 NO_DEFER_POP;
7705 op0 = gen_label_rtx ();
7707 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7709 if (temp != 0)
7711 /* If the target conflicts with the other operand of the
7712 binary op, we can't use it. Also, we can't use the target
7713 if it is a hard register, because evaluating the condition
7714 might clobber it. */
7715 if ((binary_op
7716 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7717 || (GET_CODE (temp) == REG
7718 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7719 temp = gen_reg_rtx (mode);
7720 store_expr (singleton, temp, 0);
7722 else
7723 expand_expr (singleton,
7724 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7725 if (singleton == TREE_OPERAND (exp, 1))
7726 jumpif (TREE_OPERAND (exp, 0), op0);
7727 else
7728 jumpifnot (TREE_OPERAND (exp, 0), op0);
7730 start_cleanup_deferral ();
7731 if (binary_op && temp == 0)
7732 /* Just touch the other operand. */
7733 expand_expr (TREE_OPERAND (binary_op, 1),
7734 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7735 else if (binary_op)
7736 store_expr (build (TREE_CODE (binary_op), type,
7737 make_tree (type, temp),
7738 TREE_OPERAND (binary_op, 1)),
7739 temp, 0);
7740 else
7741 store_expr (build1 (TREE_CODE (unary_op), type,
7742 make_tree (type, temp)),
7743 temp, 0);
7744 op1 = op0;
7746 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7747 comparison operator. If we have one of these cases, set the
7748 output to A, branch on A (cse will merge these two references),
7749 then set the output to FOO. */
7750 else if (temp
7751 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7752 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7753 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7754 TREE_OPERAND (exp, 1), 0)
7755 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7756 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7757 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7759 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7760 temp = gen_reg_rtx (mode);
7761 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7762 jumpif (TREE_OPERAND (exp, 0), op0);
7764 start_cleanup_deferral ();
7765 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7766 op1 = op0;
7768 else if (temp
7769 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7770 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7771 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7772 TREE_OPERAND (exp, 2), 0)
7773 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7774 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7775 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7777 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7778 temp = gen_reg_rtx (mode);
7779 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7780 jumpifnot (TREE_OPERAND (exp, 0), op0);
7782 start_cleanup_deferral ();
7783 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7784 op1 = op0;
7786 else
7788 op1 = gen_label_rtx ();
7789 jumpifnot (TREE_OPERAND (exp, 0), op0);
7791 start_cleanup_deferral ();
7793 /* One branch of the cond can be void, if it never returns. For
7794 example, A ? throw : E. */
7795 if (temp != 0
7796 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7797 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7798 else
7799 expand_expr (TREE_OPERAND (exp, 1),
7800 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7801 end_cleanup_deferral ();
7802 emit_queue ();
7803 emit_jump_insn (gen_jump (op1));
7804 emit_barrier ();
7805 emit_label (op0);
7806 start_cleanup_deferral ();
7807 if (temp != 0
7808 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7809 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7810 else
7811 expand_expr (TREE_OPERAND (exp, 2),
7812 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7815 end_cleanup_deferral ();
7817 emit_queue ();
7818 emit_label (op1);
7819 OK_DEFER_POP;
7821 return temp;
7824 case TARGET_EXPR:
7826 /* Something needs to be initialized, but we didn't know
7827 where that thing was when building the tree. For example,
7828 it could be the return value of a function, or a parameter
7829 to a function which is laid out on the stack, or a temporary
7830 variable which must be passed by reference.
7832 We guarantee that the expression will either be constructed
7833 or copied into our original target. */
7835 tree slot = TREE_OPERAND (exp, 0);
7836 tree cleanups = NULL_TREE;
7837 tree exp1;
7839 if (TREE_CODE (slot) != VAR_DECL)
7840 abort ();
7842 if (! ignore)
7843 target = original_target;
7845 /* Set this here so that if we get a target that refers to a
7846 register variable that's already been used, put_reg_into_stack
7847 knows that it should fix up those uses. */
7848 TREE_USED (slot) = 1;
7850 if (target == 0)
7852 if (DECL_RTL (slot) != 0)
7854 target = DECL_RTL (slot);
7855 /* If we have already expanded the slot, don't do
7856 it again. (mrs) */
7857 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7858 return target;
7860 else
7862 target = assign_temp (type, 2, 0, 1);
7863 /* All temp slots at this level must not conflict. */
7864 preserve_temp_slots (target);
7865 DECL_RTL (slot) = target;
7866 if (TREE_ADDRESSABLE (slot))
7868 TREE_ADDRESSABLE (slot) = 0;
7869 mark_addressable (slot);
7872 /* Since SLOT is not known to the called function
7873 to belong to its stack frame, we must build an explicit
7874 cleanup. This case occurs when we must build up a reference
7875 to pass as an argument. In this case,
7876 it is very likely that such a reference need not be
7877 built here. */
7879 if (TREE_OPERAND (exp, 2) == 0)
7880 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7881 cleanups = TREE_OPERAND (exp, 2);
7884 else
7886 /* This case does occur, when expanding a parameter which
7887 needs to be constructed on the stack. The target
7888 is the actual stack address that we want to initialize.
7889 The function we call will perform the cleanup in this case. */
7891 /* If we have already assigned it space, use that space,
7892 not the target that we were passed in, as our target
7893 parameter is only a hint. */
7894 if (DECL_RTL (slot) != 0)
7896 target = DECL_RTL (slot);
7897 /* If we have already expanded the slot, don't do
7898 it again. (mrs) */
7899 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7900 return target;
7902 else
7904 DECL_RTL (slot) = target;
7905 /* If we must have an addressable slot, then make sure that
7906 the RTL that we just stored in slot is OK. */
7907 if (TREE_ADDRESSABLE (slot))
7909 TREE_ADDRESSABLE (slot) = 0;
7910 mark_addressable (slot);
7915 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7916 /* Mark it as expanded. */
7917 TREE_OPERAND (exp, 1) = NULL_TREE;
7919 store_expr (exp1, target, 0);
7921 expand_decl_cleanup (NULL_TREE, cleanups);
7923 return target;
7926 case INIT_EXPR:
7928 tree lhs = TREE_OPERAND (exp, 0);
7929 tree rhs = TREE_OPERAND (exp, 1);
7930 tree noncopied_parts = 0;
7931 tree lhs_type = TREE_TYPE (lhs);
7933 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7934 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7935 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7936 TYPE_NONCOPIED_PARTS (lhs_type));
7937 while (noncopied_parts != 0)
7939 expand_assignment (TREE_VALUE (noncopied_parts),
7940 TREE_PURPOSE (noncopied_parts), 0, 0);
7941 noncopied_parts = TREE_CHAIN (noncopied_parts);
7943 return temp;
7946 case MODIFY_EXPR:
7948 /* If lhs is complex, expand calls in rhs before computing it.
7949 That's so we don't compute a pointer and save it over a call.
7950 If lhs is simple, compute it first so we can give it as a
7951 target if the rhs is just a call. This avoids an extra temp and copy
7952 and prevents a partial subsumption, which makes bad code.
7953 Actually we could treat component_ref's of vars like vars. */
7955 tree lhs = TREE_OPERAND (exp, 0);
7956 tree rhs = TREE_OPERAND (exp, 1);
7957 tree noncopied_parts = 0;
7958 tree lhs_type = TREE_TYPE (lhs);
7960 temp = 0;
7962 if (TREE_CODE (lhs) != VAR_DECL
7963 && TREE_CODE (lhs) != RESULT_DECL
7964 && TREE_CODE (lhs) != PARM_DECL
7965 && ! (TREE_CODE (lhs) == INDIRECT_REF
7966 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7967 preexpand_calls (exp);
7969 /* Check for |= or &= of a bitfield of size one into another bitfield
7970 of size 1. In this case, (unless we need the result of the
7971 assignment) we can do this more efficiently with a
7972 test followed by an assignment, if necessary.
7974 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7975 things change so we do, this code should be enhanced to
7976 support it. */
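/* For example, "a.x |= b.y" (both one-bit fields) is emitted as
"if (b.y) a.x = 1", and "a.x &= b.y" as "if (! b.y) a.x = 0".  */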
7977 if (ignore
7978 && TREE_CODE (lhs) == COMPONENT_REF
7979 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7980 || TREE_CODE (rhs) == BIT_AND_EXPR)
7981 && TREE_OPERAND (rhs, 0) == lhs
7982 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7983 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7984 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7986 rtx label = gen_label_rtx ();
7988 do_jump (TREE_OPERAND (rhs, 1),
7989 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7990 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7991 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7992 (TREE_CODE (rhs) == BIT_IOR_EXPR
7993 ? integer_one_node
7994 : integer_zero_node)),
7995 0, 0);
7996 do_pending_stack_adjust ();
7997 emit_label (label);
7998 return const0_rtx;
8001 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8002 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8003 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8004 TYPE_NONCOPIED_PARTS (lhs_type));
8006 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8007 while (noncopied_parts != 0)
8009 expand_assignment (TREE_PURPOSE (noncopied_parts),
8010 TREE_VALUE (noncopied_parts), 0, 0);
8011 noncopied_parts = TREE_CHAIN (noncopied_parts);
8013 return temp;
8016 case RETURN_EXPR:
8017 if (!TREE_OPERAND (exp, 0))
8018 expand_null_return ();
8019 else
8020 expand_return (TREE_OPERAND (exp, 0));
8021 return const0_rtx;
8023 case PREINCREMENT_EXPR:
8024 case PREDECREMENT_EXPR:
8025 return expand_increment (exp, 0, ignore);
8027 case POSTINCREMENT_EXPR:
8028 case POSTDECREMENT_EXPR:
8029 /* Faster to treat as pre-increment if result is not used. */
8030 return expand_increment (exp, ! ignore, ignore);
8032 case ADDR_EXPR:
8033 /* If nonzero, TEMP will be set to the address of something that might
8034 be a MEM corresponding to a stack slot. */
8035 temp = 0;
8037 /* Are we taking the address of a nested function? */
8038 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8039 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8040 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8041 && ! TREE_STATIC (exp))
8043 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8044 op0 = force_operand (op0, target);
8046 /* If we are taking the address of something erroneous, just
8047 return a zero. */
8048 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8049 return const0_rtx;
8050 else
8052 /* We make sure to pass const0_rtx down if we came in with
8053 ignore set, to avoid doing the cleanups twice for something. */
8054 op0 = expand_expr (TREE_OPERAND (exp, 0),
8055 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8056 (modifier == EXPAND_INITIALIZER
8057 ? modifier : EXPAND_CONST_ADDRESS));
8059 /* If we are going to ignore the result, OP0 will have been set
8060 to const0_rtx, so just return it. Don't get confused and
8061 think we are taking the address of the constant. */
8062 if (ignore)
8063 return op0;
8065 op0 = protect_from_queue (op0, 0);
8067 /* We would like the object in memory. If it is a constant, we can
8068 have it be statically allocated into memory. For a non-constant,
8069 we need to allocate some memory and store the value into it. */
8071 if (CONSTANT_P (op0))
8072 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8073 op0);
8074 else if (GET_CODE (op0) == MEM)
8076 mark_temp_addr_taken (op0);
8077 temp = XEXP (op0, 0);
8080 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8081 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8083 /* If this object is in a register, it must not
8084 be BLKmode. */
8085 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8086 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8088 mark_temp_addr_taken (memloc);
8089 emit_move_insn (memloc, op0);
8090 op0 = memloc;
8093 if (GET_CODE (op0) != MEM)
8094 abort ();
8096 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8098 temp = XEXP (op0, 0);
8099 #ifdef POINTERS_EXTEND_UNSIGNED
8100 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8101 && mode == ptr_mode)
8102 temp = convert_memory_address (ptr_mode, temp);
8103 #endif
8104 return temp;
8107 op0 = force_operand (XEXP (op0, 0), target);
8110 if (flag_force_addr && GET_CODE (op0) != REG)
8111 op0 = force_reg (Pmode, op0);
8113 if (GET_CODE (op0) == REG
8114 && ! REG_USERVAR_P (op0))
8115 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8117 /* If we might have had a temp slot, add an equivalent address
8118 for it. */
8119 if (temp != 0)
8120 update_temp_slot_address (temp, op0);
8122 #ifdef POINTERS_EXTEND_UNSIGNED
8123 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8124 && mode == ptr_mode)
8125 op0 = convert_memory_address (ptr_mode, op0);
8126 #endif
8128 return op0;
8130 case ENTRY_VALUE_EXPR:
8131 abort ();
8133 /* COMPLEX type for Extended Pascal & Fortran */
8134 case COMPLEX_EXPR:
8136 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8137 rtx insns;
8139 /* Get the rtx code of the operands. */
8140 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8141 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8143 if (! target)
8144 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8146 start_sequence ();
8148 /* Move the real (op0) and imaginary (op1) parts to their location. */
8149 emit_move_insn (gen_realpart (mode, target), op0);
8150 emit_move_insn (gen_imagpart (mode, target), op1);
8152 insns = get_insns ();
8153 end_sequence ();
8155 /* Complex construction should appear as a single unit. */
8156 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8157 each with a separate pseudo as destination.
8158 It's not correct for flow to treat them as a unit. */
8159 if (GET_CODE (target) != CONCAT)
8160 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8161 else
8162 emit_insns (insns);
8164 return target;
8167 case REALPART_EXPR:
8168 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8169 return gen_realpart (mode, op0);
8171 case IMAGPART_EXPR:
8172 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8173 return gen_imagpart (mode, op0);
8175 case CONJ_EXPR:
8177 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8178 rtx imag_t;
8179 rtx insns;
8181 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8183 if (! target)
8184 target = gen_reg_rtx (mode);
8186 start_sequence ();
8188 /* Store the realpart and the negated imagpart to target. */
8189 emit_move_insn (gen_realpart (partmode, target),
8190 gen_realpart (partmode, op0));
8192 imag_t = gen_imagpart (partmode, target);
8193 temp = expand_unop (partmode, neg_optab,
8194 gen_imagpart (partmode, op0), imag_t, 0);
8195 if (temp != imag_t)
8196 emit_move_insn (imag_t, temp);
8198 insns = get_insns ();
8199 end_sequence ();
8201 /* Conjugate should appear as a single unit.
8202 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8203 each with a separate pseudo as destination.
8204 It's not correct for flow to treat them as a unit. */
8205 if (GET_CODE (target) != CONCAT)
8206 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8207 else
8208 emit_insns (insns);
8210 return target;
8213 case TRY_CATCH_EXPR:
8215 tree handler = TREE_OPERAND (exp, 1);
8217 expand_eh_region_start ();
8219 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8221 expand_eh_region_end (handler);
8223 return op0;
8226 case TRY_FINALLY_EXPR:
8228 tree try_block = TREE_OPERAND (exp, 0);
8229 tree finally_block = TREE_OPERAND (exp, 1);
8230 rtx finally_label = gen_label_rtx ();
8231 rtx done_label = gen_label_rtx ();
8232 rtx return_link = gen_reg_rtx (Pmode);
8233 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8234 (tree) finally_label, (tree) return_link);
8235 TREE_SIDE_EFFECTS (cleanup) = 1;
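/* The finally block is expanded only once, as a subroutine: every path
that must run it (including the normal fall-through, via the cleanup
registered below) loads RETURN_LINK with a return address, jumps to
FINALLY_LABEL, and comes back through the indirect jump.  */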
8237 /* Start a new binding layer that will keep track of all cleanup
8238 actions to be performed. */
8239 expand_start_bindings (2);
8241 target_temp_slot_level = temp_slot_level;
8243 expand_decl_cleanup (NULL_TREE, cleanup);
8244 op0 = expand_expr (try_block, target, tmode, modifier);
8246 preserve_temp_slots (op0);
8247 expand_end_bindings (NULL_TREE, 0, 0);
8248 emit_jump (done_label);
8249 emit_label (finally_label);
8250 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8251 emit_indirect_jump (return_link);
8252 emit_label (done_label);
8253 return op0;
8256 case GOTO_SUBROUTINE_EXPR:
8258 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8259 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8260 rtx return_address = gen_label_rtx ();
8261 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8262 emit_jump (subr);
8263 emit_label (return_address);
8264 return const0_rtx;
8267 case POPDCC_EXPR:
8269 rtx dcc = get_dynamic_cleanup_chain ();
8270 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8271 return const0_rtx;
8274 case POPDHC_EXPR:
8276 rtx dhc = get_dynamic_handler_chain ();
8277 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8278 return const0_rtx;
8281 case VA_ARG_EXPR:
8282 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8284 default:
8285 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8288 /* Here to do an ordinary binary operator, generating an instruction
8289 from the optab already placed in `this_optab'. */
8290 binop:
8291 preexpand_calls (exp);
8292 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8293 subtarget = 0;
8294 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8295 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8296 binop2:
8297 temp = expand_binop (mode, this_optab, op0, op1, target,
8298 unsignedp, OPTAB_LIB_WIDEN);
8299 if (temp == 0)
8300 abort ();
8301 return temp;
8304 /* Return the tree node and offset if a given argument corresponds to
8305 a string constant. */
8307 tree
8308 string_constant (arg, ptr_offset)
8309 tree arg;
8310 tree *ptr_offset;
8312 STRIP_NOPS (arg);
8314 if (TREE_CODE (arg) == ADDR_EXPR
8315 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8317 *ptr_offset = integer_zero_node;
8318 return TREE_OPERAND (arg, 0);
8320 else if (TREE_CODE (arg) == PLUS_EXPR)
8322 tree arg0 = TREE_OPERAND (arg, 0);
8323 tree arg1 = TREE_OPERAND (arg, 1);
8325 STRIP_NOPS (arg0);
8326 STRIP_NOPS (arg1);
8328 if (TREE_CODE (arg0) == ADDR_EXPR
8329 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8331 *ptr_offset = arg1;
8332 return TREE_OPERAND (arg0, 0);
8334 else if (TREE_CODE (arg1) == ADDR_EXPR
8335 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8337 *ptr_offset = arg0;
8338 return TREE_OPERAND (arg1, 0);
8342 return 0;
8345 /* Expand code for a post- or pre- increment or decrement
8346 and return the RTX for the result.
8347 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8349 static rtx
8350 expand_increment (exp, post, ignore)
8351 register tree exp;
8352 int post, ignore;
8354 register rtx op0, op1;
8355 register rtx temp, value;
8356 register tree incremented = TREE_OPERAND (exp, 0);
8357 optab this_optab = add_optab;
8358 int icode;
8359 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8360 int op0_is_copy = 0;
8361 int single_insn = 0;
8362 /* 1 means we can't store into OP0 directly,
8363 because it is a subreg narrower than a word,
8364 and we don't dare clobber the rest of the word. */
8365 int bad_subreg = 0;
8367 /* Stabilize any component ref that might need to be
8368 evaluated more than once below. */
8369 if (!post
8370 || TREE_CODE (incremented) == BIT_FIELD_REF
8371 || (TREE_CODE (incremented) == COMPONENT_REF
8372 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8373 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8374 incremented = stabilize_reference (incremented);
8375 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8376 ones into save exprs so that they don't accidentally get evaluated
8377 more than once by the code below. */
8378 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8379 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8380 incremented = save_expr (incremented);
8382 /* Compute the operands as RTX.
8383 Note whether OP0 is the actual lvalue or a copy of it:
8384 I believe it is a copy iff it is a register or subreg
8385 and insns were generated in computing it. */
8387 temp = get_last_insn ();
8388 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8390 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8391 in place but instead must do sign- or zero-extension during assignment,
8392 so we copy it into a new register and let the code below use it as
8393 a copy.
8395 Note that we can safely modify this SUBREG since it is known not to be
8396 shared (it was made by the expand_expr call above). */
8398 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8400 if (post)
8401 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8402 else
8403 bad_subreg = 1;
8405 else if (GET_CODE (op0) == SUBREG
8406 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8408 /* We cannot increment this SUBREG in place. If we are
8409 post-incrementing, get a copy of the old value. Otherwise,
8410 just mark that we cannot increment in place. */
8411 if (post)
8412 op0 = copy_to_reg (op0);
8413 else
8414 bad_subreg = 1;
8417 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8418 && temp != get_last_insn ());
8419 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8420 EXPAND_MEMORY_USE_BAD);
8422 /* Decide whether incrementing or decrementing. */
8423 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8424 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8425 this_optab = sub_optab;
8427 /* Convert decrement by a constant into a negative increment. */
8428 if (this_optab == sub_optab
8429 && GET_CODE (op1) == CONST_INT)
8431 op1 = GEN_INT (- INTVAL (op1));
8432 this_optab = add_optab;
8435 /* For a preincrement, see if we can do this with a single instruction. */
8436 if (!post)
8438 icode = (int) this_optab->handlers[(int) mode].insn_code;
8439 if (icode != (int) CODE_FOR_nothing
8440 /* Make sure that OP0 is valid for operands 0 and 1
8441 of the insn we want to queue. */
8442 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8443 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8444 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8445 single_insn = 1;
8448 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8449 then we cannot just increment OP0. We must therefore contrive to
8450 increment the original value. Then, for postincrement, we can return
8451 OP0 since it is a copy of the old value. For preincrement, expand here
8452 unless we can do it with a single insn.
8454 Likewise if storing directly into OP0 would clobber high bits
8455 we need to preserve (bad_subreg). */
8456 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8458 /* This is the easiest way to increment the value wherever it is.
8459 Problems with multiple evaluation of INCREMENTED are prevented
8460 because either (1) it is a component_ref or preincrement,
8461 in which case it was stabilized above, or (2) it is an array_ref
8462 with constant index in an array in a register, which is
8463 safe to reevaluate. */
8464 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8465 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8466 ? MINUS_EXPR : PLUS_EXPR),
8467 TREE_TYPE (exp),
8468 incremented,
8469 TREE_OPERAND (exp, 1));
8471 while (TREE_CODE (incremented) == NOP_EXPR
8472 || TREE_CODE (incremented) == CONVERT_EXPR)
8474 newexp = convert (TREE_TYPE (incremented), newexp);
8475 incremented = TREE_OPERAND (incremented, 0);
8478 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8479 return post ? op0 : temp;
8482 if (post)
8484 /* We have a true reference to the value in OP0.
8485 If there is an insn to add or subtract in this mode, queue it.
8486 Queueing the increment insn avoids the register shuffling
8487 that often results if we must increment now and first save
8488 the old value for subsequent use. */
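/* For example, with "y = x++" the queued add lets the old value of X
be used for Y directly; the increment itself is emitted later, when
the queue is flushed by emit_queue.  */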
8490 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8491 op0 = stabilize (op0);
8492 #endif
8494 icode = (int) this_optab->handlers[(int) mode].insn_code;
8495 if (icode != (int) CODE_FOR_nothing
8496 /* Make sure that OP0 is valid for operands 0 and 1
8497 of the insn we want to queue. */
8498 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8499 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8501 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8502 op1 = force_reg (mode, op1);
8504 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8506 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8508 rtx addr = (general_operand (XEXP (op0, 0), mode)
8509 ? force_reg (Pmode, XEXP (op0, 0))
8510 : copy_to_reg (XEXP (op0, 0)));
8511 rtx temp, result;
8513 op0 = change_address (op0, VOIDmode, addr);
8514 temp = force_reg (GET_MODE (op0), op0);
8515 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8516 op1 = force_reg (mode, op1);
8518 /* The increment queue is LIFO, thus we have to `queue'
8519 the instructions in reverse order. */
8520 enqueue_insn (op0, gen_move_insn (op0, temp));
8521 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8522 return result;
8526 /* Preincrement, or we can't increment with one simple insn. */
8527 if (post)
8528 /* Save a copy of the value before inc or dec, to return it later. */
8529 temp = value = copy_to_reg (op0);
8530 else
8531 /* Arrange to return the incremented value. */
8532 /* Copy the rtx because expand_binop will protect from the queue,
8533 and the results of that would be invalid for us to return
8534 if our caller does emit_queue before using our result. */
8535 temp = copy_rtx (value = op0);
8537 /* Increment however we can. */
8538 op1 = expand_binop (mode, this_optab, value, op1,
8539 current_function_check_memory_usage ? NULL_RTX : op0,
8540 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8541 /* Make sure the value is stored into OP0. */
8542 if (op1 != op0)
8543 emit_move_insn (op0, op1);
8545 return temp;
8548 /* Expand all function calls contained within EXP, innermost ones first.
8549 But don't look within expressions that have sequence points.
8550 For each CALL_EXPR, record the rtx for its value
8551 in the CALL_EXPR_RTL field. */
8553 static void
8554 preexpand_calls (exp)
8555 tree exp;
8557 register int nops, i;
8558 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8560 if (! do_preexpand_calls)
8561 return;
8563 /* Only expressions and references can contain calls. */
8565 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8566 return;
8568 switch (TREE_CODE (exp))
8570 case CALL_EXPR:
8571 /* Do nothing if already expanded. */
8572 if (CALL_EXPR_RTL (exp) != 0
8573 /* Do nothing if the call returns a variable-sized object. */
8574 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
8575 /* Do nothing to built-in functions. */
8576 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
8577 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8578 == FUNCTION_DECL)
8579 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8580 return;
8582 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8583 return;
8585 case COMPOUND_EXPR:
8586 case COND_EXPR:
8587 case TRUTH_ANDIF_EXPR:
8588 case TRUTH_ORIF_EXPR:
8589 /* If we find one of these, then we can be sure
8590 the adjust will be done for it (since it makes jumps).
8591 Do it now, so that if this is inside an argument
8592 of a function, we don't get the stack adjustment
8593 after some other args have already been pushed. */
8594 do_pending_stack_adjust ();
8595 return;
8597 case BLOCK:
8598 case RTL_EXPR:
8599 case WITH_CLEANUP_EXPR:
8600 case CLEANUP_POINT_EXPR:
8601 case TRY_CATCH_EXPR:
8602 return;
8604 case SAVE_EXPR:
8605 if (SAVE_EXPR_RTL (exp) != 0)
8606 return;
8608 default:
8609 break;
8612 nops = tree_code_length[(int) TREE_CODE (exp)];
8613 for (i = 0; i < nops; i++)
8614 if (TREE_OPERAND (exp, i) != 0)
8616 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
8617 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
8618 It doesn't happen before the call is made. */
8619 ;
8620 else
8622 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8623 if (type == 'e' || type == '<' || type == '1' || type == '2'
8624 || type == 'r')
8625 preexpand_calls (TREE_OPERAND (exp, i));
8630 /* At the start of a function, record that we have no previously-pushed
8631 arguments waiting to be popped. */
8633 void
8634 init_pending_stack_adjust ()
8636 pending_stack_adjust = 0;
8639 /* When exiting from function, if safe, clear out any pending stack adjust
8640 so the adjustment won't get done.
8642 Note, if the current function calls alloca, then it must have a
8643 frame pointer regardless of the value of flag_omit_frame_pointer. */
8645 void
8646 clear_pending_stack_adjust ()
8648 #ifdef EXIT_IGNORE_STACK
8649 if (optimize > 0
8650 && (! flag_omit_frame_pointer || current_function_calls_alloca)
8651 && EXIT_IGNORE_STACK
8652 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8653 && ! flag_inline_functions)
8654 pending_stack_adjust = 0;
8655 #endif
8658 /* Pop any previously-pushed arguments that have not been popped yet. */
8660 void
8661 do_pending_stack_adjust ()
8663 if (inhibit_defer_pop == 0)
8665 if (pending_stack_adjust != 0)
8666 adjust_stack (GEN_INT (pending_stack_adjust));
8667 pending_stack_adjust = 0;
8671 /* Expand conditional expressions. */
8673 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8674 LABEL is an rtx of code CODE_LABEL, in this function and all the
8675 functions here. */
8677 void
8678 jumpifnot (exp, label)
8679 tree exp;
8680 rtx label;
8682 do_jump (exp, label, NULL_RTX);
8685 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8687 void
8688 jumpif (exp, label)
8689 tree exp;
8690 rtx label;
8692 do_jump (exp, NULL_RTX, label);
8695 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8696 the result is zero, or IF_TRUE_LABEL if the result is one.
8697 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8698 meaning fall through in that case.
8700 do_jump always does any pending stack adjust except when it does not
8701 actually perform a jump. An example where there is no jump
8702 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8704 This function is responsible for optimizing cases such as
8705 &&, || and comparison operators in EXP. */
8707 void
8708 do_jump (exp, if_false_label, if_true_label)
8709 tree exp;
8710 rtx if_false_label, if_true_label;
8712 register enum tree_code code = TREE_CODE (exp);
8713 /* Some cases need to create a label to jump to
8714 in order to properly fall through.
8715 These cases set DROP_THROUGH_LABEL nonzero. */
8716 rtx drop_through_label = 0;
8717 rtx temp;
8718 int i;
8719 tree type;
8720 enum machine_mode mode;
8722 #ifdef MAX_INTEGER_COMPUTATION_MODE
8723 check_max_integer_computation_mode (exp);
8724 #endif
8726 emit_queue ();
8728 switch (code)
8730 case ERROR_MARK:
8731 break;
8733 case INTEGER_CST:
8734 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8735 if (temp)
8736 emit_jump (temp);
8737 break;
8739 #if 0
8740 /* This is not true with #pragma weak */
8741 case ADDR_EXPR:
8742 /* The address of something can never be zero. */
8743 if (if_true_label)
8744 emit_jump (if_true_label);
8745 break;
8746 #endif
8748 case NOP_EXPR:
8749 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8750 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8751 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8752 goto normal;
8753 case CONVERT_EXPR:
8754 /* If we are narrowing the operand, we have to do the compare in the
8755 narrower mode. */
8756 if ((TYPE_PRECISION (TREE_TYPE (exp))
8757 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8758 goto normal;
8759 case NON_LVALUE_EXPR:
8760 case REFERENCE_EXPR:
8761 case ABS_EXPR:
8762 case NEGATE_EXPR:
8763 case LROTATE_EXPR:
8764 case RROTATE_EXPR:
8765 /* These cannot change zero->non-zero or vice versa. */
8766 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8767 break;
8769 #if 0
8770 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
8771 a test, and can take more if the test is eliminated. */
8772 case PLUS_EXPR:
8773 /* Reduce to minus. */
8774 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8775 TREE_OPERAND (exp, 0),
8776 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8777 TREE_OPERAND (exp, 1))));
8778 /* Process as MINUS. */
8779 #endif
8781 case MINUS_EXPR:
8782 /* Non-zero iff operands of minus differ. */
8783 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
8784 TREE_OPERAND (exp, 0),
8785 TREE_OPERAND (exp, 1)),
8786 NE, NE, if_false_label, if_true_label);
8787 break;
8789 case BIT_AND_EXPR:
8790 /* If we are AND'ing with a small constant, do this comparison in the
8791 smallest type that fits. If the machine doesn't have comparisons
8792 that small, it will be converted back to the wider comparison.
8793 This helps if we are testing the sign bit of a narrower object.
8794 combine can't do this for us because it can't know whether a
8795 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
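/* For example, a test of (x & 0x80) with X an int can be done as a
QImode comparison, since only the low byte is significant.  */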
8797 if (! SLOW_BYTE_ACCESS
8798 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8799 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8800 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8801 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8802 && (type = type_for_mode (mode, 1)) != 0
8803 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8804 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8805 != CODE_FOR_nothing))
8807 do_jump (convert (type, exp), if_false_label, if_true_label);
8808 break;
8810 goto normal;
8812 case TRUTH_NOT_EXPR:
8813 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8814 break;
8816 case TRUTH_ANDIF_EXPR:
8817 if (if_false_label == 0)
8818 if_false_label = drop_through_label = gen_label_rtx ();
8819 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8820 start_cleanup_deferral ();
8821 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8822 end_cleanup_deferral ();
8823 break;
8825 case TRUTH_ORIF_EXPR:
8826 if (if_true_label == 0)
8827 if_true_label = drop_through_label = gen_label_rtx ();
8828 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8829 start_cleanup_deferral ();
8830 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8831 end_cleanup_deferral ();
8832 break;
8834 case COMPOUND_EXPR:
8835 push_temp_slots ();
8836 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8837 preserve_temp_slots (NULL_RTX);
8838 free_temp_slots ();
8839 pop_temp_slots ();
8840 emit_queue ();
8841 do_pending_stack_adjust ();
8842 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8843 break;
8845 case COMPONENT_REF:
8846 case BIT_FIELD_REF:
8847 case ARRAY_REF:
8849 int bitsize, bitpos, unsignedp;
8850 enum machine_mode mode;
8851 tree type;
8852 tree offset;
8853 int volatilep = 0;
8854 int alignment;
8856 /* Get description of this reference. We don't actually care
8857 about the underlying object here. */
8858 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8859 &mode, &unsignedp, &volatilep,
8860 &alignment);
8862 type = type_for_size (bitsize, unsignedp);
8863 if (! SLOW_BYTE_ACCESS
8864 && type != 0 && bitsize >= 0
8865 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8866 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8867 != CODE_FOR_nothing))
8869 do_jump (convert (type, exp), if_false_label, if_true_label);
8870 break;
8872 goto normal;
8875 case COND_EXPR:
8876 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8877 if (integer_onep (TREE_OPERAND (exp, 1))
8878 && integer_zerop (TREE_OPERAND (exp, 2)))
8879 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8881 else if (integer_zerop (TREE_OPERAND (exp, 1))
8882 && integer_onep (TREE_OPERAND (exp, 2)))
8883 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8885 else
8887 register rtx label1 = gen_label_rtx ();
8888 drop_through_label = gen_label_rtx ();
8890 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8892 start_cleanup_deferral ();
8893 /* Now the THEN-expression. */
8894 do_jump (TREE_OPERAND (exp, 1),
8895 if_false_label ? if_false_label : drop_through_label,
8896 if_true_label ? if_true_label : drop_through_label);
8897 /* In case the do_jump just above never jumps. */
8898 do_pending_stack_adjust ();
8899 emit_label (label1);
8901 /* Now the ELSE-expression. */
8902 do_jump (TREE_OPERAND (exp, 2),
8903 if_false_label ? if_false_label : drop_through_label,
8904 if_true_label ? if_true_label : drop_through_label);
8905 end_cleanup_deferral ();
8907 break;
8909 case EQ_EXPR:
8911 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
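/* A complex equality holds only if both parts match, so (a == b) is
rewritten as (REALPART (a) == REALPART (b)) && (IMAGPART (a) == IMAGPART (b))
and jumped on accordingly.  */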
8913 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8914 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8916 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8917 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8918 do_jump
8919 (fold
8920 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
8921 fold (build (EQ_EXPR, TREE_TYPE (exp),
8922 fold (build1 (REALPART_EXPR,
8923 TREE_TYPE (inner_type),
8924 exp0)),
8925 fold (build1 (REALPART_EXPR,
8926 TREE_TYPE (inner_type),
8927 exp1)))),
8928 fold (build (EQ_EXPR, TREE_TYPE (exp),
8929 fold (build1 (IMAGPART_EXPR,
8930 TREE_TYPE (inner_type),
8931 exp0)),
8932 fold (build1 (IMAGPART_EXPR,
8933 TREE_TYPE (inner_type),
8934 exp1)))))),
8935 if_false_label, if_true_label);
8938 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8939 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8941 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8942 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
8943 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8944 else
8945 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
8946 break;
8949 case NE_EXPR:
8951 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
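/* Likewise a complex inequality: (a != b) is rewritten as
(REALPART (a) != REALPART (b)) || (IMAGPART (a) != IMAGPART (b)).  */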
8953 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8954 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8956 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8957 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8958 do_jump
8959 (fold
8960 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
8961 fold (build (NE_EXPR, TREE_TYPE (exp),
8962 fold (build1 (REALPART_EXPR,
8963 TREE_TYPE (inner_type),
8964 exp0)),
8965 fold (build1 (REALPART_EXPR,
8966 TREE_TYPE (inner_type),
8967 exp1)))),
8968 fold (build (NE_EXPR, TREE_TYPE (exp),
8969 fold (build1 (IMAGPART_EXPR,
8970 TREE_TYPE (inner_type),
8971 exp0)),
8972 fold (build1 (IMAGPART_EXPR,
8973 TREE_TYPE (inner_type),
8974 exp1)))))),
8975 if_false_label, if_true_label);
8978 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8979 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8981 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8982 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
8983 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8984 else
8985 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
8986 break;
8989 case LT_EXPR:
8990 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8991 if (GET_MODE_CLASS (mode) == MODE_INT
8992 && ! can_compare_p (mode, ccp_jump))
8993 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8994 else
8995 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
8996 break;
8998 case LE_EXPR:
8999 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9000 if (GET_MODE_CLASS (mode) == MODE_INT
9001 && ! can_compare_p (mode, ccp_jump))
9002 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9003 else
9004 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9005 break;
9007 case GT_EXPR:
9008 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9009 if (GET_MODE_CLASS (mode) == MODE_INT
9010 && ! can_compare_p (mode, ccp_jump))
9011 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9012 else
9013 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9014 break;
9016 case GE_EXPR:
9017 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9018 if (GET_MODE_CLASS (mode) == MODE_INT
9019 && ! can_compare_p (mode, ccp_jump))
9020 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9021 else
9022 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9023 break;
9025 default:
9026 normal:
9027 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9028 #if 0
9029 /* This is no longer needed and produces poor code, since it makes
9030 comparisons and tests from non-SI objects use different code
9031 sequences. */
9032 /* Copy to register to avoid generating bad insns by cse
9033 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9034 if (!cse_not_expected && GET_CODE (temp) == MEM)
9035 temp = copy_to_reg (temp);
9036 #endif
9037 do_pending_stack_adjust ();
9038 /* Do any postincrements in the expression that was tested. */
9039 emit_queue ();
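/* For example, in a test such as "if (p++)" the increment of P was queued
   while the operand was expanded; flushing the queue here makes the side
   effect happen after TEMP has captured the tested value but before any of
   the conditional jumps below are emitted.  */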
9041 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9043 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9044 if (target)
9045 emit_jump (target);
9047 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9048 && ! can_compare_p (GET_MODE (temp), ccp_jump))
9049 /* Note swapping the labels gives us not-equal. */
9050 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9051 else if (GET_MODE (temp) != VOIDmode)
9052 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9053 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9054 GET_MODE (temp), NULL_RTX, 0,
9055 if_false_label, if_true_label);
9056 else
9057 abort ();
9060 if (drop_through_label)
9062 /* If do_jump produces code that might be jumped around,
9063 do any stack adjusts from that code, before the place
9064 where control merges in. */
9065 do_pending_stack_adjust ();
9066 emit_label (drop_through_label);
9070 /* Given a comparison expression EXP for values too wide to be compared
9071 with one insn, test the comparison and jump to the appropriate label.
9072 The code of EXP is ignored; we always test GT if SWAP is 0,
9073 and LT if SWAP is 1. */
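/* A small illustration of the SWAP convention: to test "a < b" the caller
   passes SWAP == 1, the operands are expanded in the opposite order (OP0
   from operand 1, OP1 from operand 0), and the same word-by-word GT test is
   emitted on the swapped pair, so only a "greater than" sequence ever has
   to be generated here.  */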
9075 static void
9076 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9077 tree exp;
9078 int swap;
9079 rtx if_false_label, if_true_label;
9081 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9082 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9083 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9084 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9086 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9089 /* Compare OP0 with OP1, word at a time, in mode MODE.
9090 UNSIGNEDP says to do unsigned comparison.
9091 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
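/* Sketch of the sequence emitted below for a two-word comparison (e.g.
   DImode on a 32-bit target), highest word first:

       if (op0.high >  op1.high) goto if_true_label;
       if (op0.high != op1.high) goto if_false_label;
       if (op0.low  >  op1.low)  goto if_true_label;    (always unsigned)
       if (op0.low  != op1.low)  goto if_false_label;
       goto if_false_label;

   Only the high-order word uses the signedness of the original comparison;
   all lower words are compared unsigned.  */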
9093 void
9094 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9095 enum machine_mode mode;
9096 int unsignedp;
9097 rtx op0, op1;
9098 rtx if_false_label, if_true_label;
9100 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9101 rtx drop_through_label = 0;
9102 int i;
9104 if (! if_true_label || ! if_false_label)
9105 drop_through_label = gen_label_rtx ();
9106 if (! if_true_label)
9107 if_true_label = drop_through_label;
9108 if (! if_false_label)
9109 if_false_label = drop_through_label;
9111 /* Compare a word at a time, high order first. */
9112 for (i = 0; i < nwords; i++)
9114 rtx op0_word, op1_word;
9116 if (WORDS_BIG_ENDIAN)
9118 op0_word = operand_subword_force (op0, i, mode);
9119 op1_word = operand_subword_force (op1, i, mode);
9121 else
9123 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9124 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9127 /* All but the high-order word must be compared as unsigned. */
9128 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9129 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9130 NULL_RTX, if_true_label);
9132 /* Consider lower words only if these are equal. */
9133 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9134 NULL_RTX, 0, NULL_RTX, if_false_label);
9137 if (if_false_label)
9138 emit_jump (if_false_label);
9139 if (drop_through_label)
9140 emit_label (drop_through_label);
9143 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9144 with one insn, test the comparison and jump to the appropriate label. */
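/* Roughly, for an N-word equality test this emits

       if (op0.word[i] != op1.word[i]) goto if_false_label;   for each i
       goto if_true_label;

   so control reaches IF_TRUE_LABEL only when every word matched.  */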
9146 static void
9147 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9148 tree exp;
9149 rtx if_false_label, if_true_label;
9151 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9152 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9153 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9154 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9155 int i;
9156 rtx drop_through_label = 0;
9158 if (! if_false_label)
9159 drop_through_label = if_false_label = gen_label_rtx ();
9161 for (i = 0; i < nwords; i++)
9162 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9163 operand_subword_force (op1, i, mode),
9164 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9165 word_mode, NULL_RTX, 0, if_false_label,
9166 NULL_RTX);
9168 if (if_true_label)
9169 emit_jump (if_true_label);
9170 if (drop_through_label)
9171 emit_label (drop_through_label);
9174 /* Jump according to whether OP0 is 0.
9175 We assume that OP0 has an integer mode that is too wide
9176 for the available compare insns. */
9178 void
9179 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9180 rtx op0;
9181 rtx if_false_label, if_true_label;
9183 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9184 rtx part;
9185 int i;
9186 rtx drop_through_label = 0;
9188 /* The fastest way of doing this comparison on almost any machine is to
9189 "or" all the words and compare the result. If all have to be loaded
9190 from memory and this is a very wide item, it's possible this may
9191 be slower, but that's highly unlikely. */
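/* E.g. for a four-word operand this computes

       part = w0 | w1 | w2 | w3;

   and a single word-sized comparison of PART against zero then decides the
   whole test.  */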
9193 part = gen_reg_rtx (word_mode);
9194 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9195 for (i = 1; i < nwords && part != 0; i++)
9196 part = expand_binop (word_mode, ior_optab, part,
9197 operand_subword_force (op0, i, GET_MODE (op0)),
9198 part, 1, OPTAB_WIDEN);
9200 if (part != 0)
9202 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9203 NULL_RTX, 0, if_false_label, if_true_label);
9205 return;
9208 /* If we couldn't do the "or" simply, do this with a series of compares. */
9209 if (! if_false_label)
9210 drop_through_label = if_false_label = gen_label_rtx ();
9212 for (i = 0; i < nwords; i++)
9213 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9214 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9215 if_false_label, NULL_RTX);
9217 if (if_true_label)
9218 emit_jump (if_true_label);
9220 if (drop_through_label)
9221 emit_label (drop_through_label);
9224 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9225 (including code to compute the values to be compared),
9226 and set (CC0) according to the result.
9227 The decision as to signed or unsigned comparison must be made by the caller.
9229 We force a stack adjustment unless there are currently
9230 things pushed on the stack that aren't yet used.
9232 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9233 compared.
9235 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9236 size of MODE should be used. */
9238 rtx
9239 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9240 register rtx op0, op1;
9241 enum rtx_code code;
9242 int unsignedp;
9243 enum machine_mode mode;
9244 rtx size;
9245 int align;
9247 rtx tem;
9249 /* If one operand is constant, make it the second one. Only do this
9250 if the other operand is not constant as well. */
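/* E.g. "4 < x" is rewritten as "x > 4": the operands are exchanged and
   swap_condition turns LT into GT, so later code only has to recognize a
   constant in the second position.  */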
9252 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9253 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9255 tem = op0;
9256 op0 = op1;
9257 op1 = tem;
9258 code = swap_condition (code);
9261 if (flag_force_mem)
9263 op0 = force_not_mem (op0);
9264 op1 = force_not_mem (op1);
9267 do_pending_stack_adjust ();
9269 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9270 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9271 return tem;
9273 #if 0
9274 /* There's no need to do this now that combine.c can eliminate lots of
9275 sign extensions. This can be less efficient in certain cases on other
9276 machines. */
9278 /* If this is a signed equality comparison, we can do it as an
9279 unsigned comparison since zero-extension is cheaper than sign
9280 extension and comparisons with zero are done as unsigned. This is
9281 the case even on machines that can do fast sign extension, since
9282 zero-extension is easier to combine with other operations than
9283 sign-extension is. If we are comparing against a constant, we must
9284 convert it to what it would look like unsigned. */
9285 if ((code == EQ || code == NE) && ! unsignedp
9286 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9288 if (GET_CODE (op1) == CONST_INT
9289 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9290 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9291 unsignedp = 1;
9293 #endif
9295 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9297 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9300 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9301 The decision as to signed or unsigned comparison must be made by the caller.
9303 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9304 compared.
9306 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9307 size of MODE should be used. */
9309 void
9310 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9311 if_false_label, if_true_label)
9312 register rtx op0, op1;
9313 enum rtx_code code;
9314 int unsignedp;
9315 enum machine_mode mode;
9316 rtx size;
9317 int align;
9318 rtx if_false_label, if_true_label;
9320 rtx tem;
9321 int dummy_true_label = 0;
9323 /* Reverse the comparison if that is safe and we want to jump if it is
9324 false. */
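/* E.g. with only IF_FALSE_LABEL set, a GE test becomes an LT test that
   jumps to that label (via reverse_condition), so a single conditional
   branch suffices.  This is skipped for floating-point modes, where
   reversing the condition is not generally safe.  */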
9325 if (! if_true_label && ! FLOAT_MODE_P (mode))
9327 if_true_label = if_false_label;
9328 if_false_label = 0;
9329 code = reverse_condition (code);
9332 /* If one operand is constant, make it the second one. Only do this
9333 if the other operand is not constant as well. */
9335 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9336 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9338 tem = op0;
9339 op0 = op1;
9340 op1 = tem;
9341 code = swap_condition (code);
9344 if (flag_force_mem)
9346 op0 = force_not_mem (op0);
9347 op1 = force_not_mem (op1);
9350 do_pending_stack_adjust ();
9352 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9353 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9355 if (tem == const_true_rtx)
9357 if (if_true_label)
9358 emit_jump (if_true_label);
9360 else
9362 if (if_false_label)
9363 emit_jump (if_false_label);
9365 return;
9368 #if 0
9369 /* There's no need to do this now that combine.c can eliminate lots of
9370 sign extensions. This can be less efficient in certain cases on other
9371 machines. */
9373 /* If this is a signed equality comparison, we can do it as an
9374 unsigned comparison since zero-extension is cheaper than sign
9375 extension and comparisons with zero are done as unsigned. This is
9376 the case even on machines that can do fast sign extension, since
9377 zero-extension is easier to combine with other operations than
9378 sign-extension is. If we are comparing against a constant, we must
9379 convert it to what it would look like unsigned. */
9380 if ((code == EQ || code == NE) && ! unsignedp
9381 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9383 if (GET_CODE (op1) == CONST_INT
9384 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9385 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9386 unsignedp = 1;
9388 #endif
9390 if (! if_true_label)
9392 dummy_true_label = 1;
9393 if_true_label = gen_label_rtx ();
9396 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9397 if_true_label);
9399 if (if_false_label)
9400 emit_jump (if_false_label);
9401 if (dummy_true_label)
9402 emit_label (if_true_label);
9405 /* Generate code for a comparison expression EXP (including code to compute
9406 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9407 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9408 generated code will drop through.
9409 SIGNED_CODE should be the rtx operation for this comparison for
9410 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9412 We force a stack adjustment unless there are currently
9413 things pushed on the stack that aren't yet used. */
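/* For example, the LT_EXPR case of do_jump above calls this with
   SIGNED_CODE == LT and UNSIGNED_CODE == LTU; TREE_UNSIGNED of the
   operands' type selects which of the two rtx codes is actually used.  */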
9415 static void
9416 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9417 if_true_label)
9418 register tree exp;
9419 enum rtx_code signed_code, unsigned_code;
9420 rtx if_false_label, if_true_label;
9422 register rtx op0, op1;
9423 register tree type;
9424 register enum machine_mode mode;
9425 int unsignedp;
9426 enum rtx_code code;
9428 /* Don't crash if the comparison was erroneous. */
9429 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9430 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9431 return;
9433 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9434 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9435 mode = TYPE_MODE (type);
9436 unsignedp = TREE_UNSIGNED (type);
9437 code = unsignedp ? unsigned_code : signed_code;
9439 #ifdef HAVE_canonicalize_funcptr_for_compare
9440 /* If function pointers need to be "canonicalized" before they can
9441 be reliably compared, then canonicalize them. */
9442 if (HAVE_canonicalize_funcptr_for_compare
9443 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9444 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9445 == FUNCTION_TYPE))
9447 rtx new_op0 = gen_reg_rtx (mode);
9449 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9450 op0 = new_op0;
9453 if (HAVE_canonicalize_funcptr_for_compare
9454 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9455 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9456 == FUNCTION_TYPE))
9458 rtx new_op1 = gen_reg_rtx (mode);
9460 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9461 op1 = new_op1;
9463 #endif
9465 /* Do any postincrements in the expression that was tested. */
9466 emit_queue ();
9468 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9469 ((mode == BLKmode)
9470 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9471 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT,
9472 if_false_label, if_true_label);
9475 /* Generate code to calculate EXP using a store-flag instruction
9476 and return an rtx for the result. EXP is either a comparison
9477 or a TRUTH_NOT_EXPR whose operand is a comparison.
9479 If TARGET is nonzero, store the result there if convenient.
9481 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9482 cheap.
9484 Return zero if there is no suitable set-flag instruction
9485 available on this machine.
9487 Once expand_expr has been called on the arguments of the comparison,
9488 we are committed to doing the store flag, since it is not safe to
9489 re-evaluate the expression. We emit the store-flag insn by calling
9490 emit_store_flag, but only expand the arguments if we have a reason
9491 to believe that emit_store_flag will be successful. If we think that
9492 it will, but it isn't, we have to simulate the store-flag with a
9493 set/jump/set sequence. */
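/* In other words, for something like "flag = (a < b);" this tries to
   produce the 0/1 result directly with a store-flag (scc) instruction,
   instead of a compare, a conditional jump and two moves.  */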
9495 static rtx
9496 do_store_flag (exp, target, mode, only_cheap)
9497 tree exp;
9498 rtx target;
9499 enum machine_mode mode;
9500 int only_cheap;
9502 enum rtx_code code;
9503 tree arg0, arg1, type;
9504 tree tem;
9505 enum machine_mode operand_mode;
9506 int invert = 0;
9507 int unsignedp;
9508 rtx op0, op1;
9509 enum insn_code icode;
9510 rtx subtarget = target;
9511 rtx result, label;
9513 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9514 result at the end. We can't simply invert the test since it would
9515 have already been inverted if it were valid. This case occurs for
9516 some floating-point comparisons. */
9518 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9519 invert = 1, exp = TREE_OPERAND (exp, 0);
9521 arg0 = TREE_OPERAND (exp, 0);
9522 arg1 = TREE_OPERAND (exp, 1);
9523 type = TREE_TYPE (arg0);
9524 operand_mode = TYPE_MODE (type);
9525 unsignedp = TREE_UNSIGNED (type);
9527 /* We won't bother with BLKmode store-flag operations because it would mean
9528 passing a lot of information to emit_store_flag. */
9529 if (operand_mode == BLKmode)
9530 return 0;
9532 /* We won't bother with store-flag operations involving function pointers
9533 when function pointers must be canonicalized before comparisons. */
9534 #ifdef HAVE_canonicalize_funcptr_for_compare
9535 if (HAVE_canonicalize_funcptr_for_compare
9536 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9537 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9538 == FUNCTION_TYPE))
9539 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9540 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9541 == FUNCTION_TYPE))))
9542 return 0;
9543 #endif
9545 STRIP_NOPS (arg0);
9546 STRIP_NOPS (arg1);
9548 /* Get the rtx comparison code to use. We know that EXP is a comparison
9549 operation of some type. Some comparisons against 1 and -1 can be
9550 converted to comparisons with zero. Do so here so that the tests
9551 below will be aware that we have a comparison with zero. These
9552 tests will not catch constants in the first operand, but constants
9553 are rarely passed as the first operand. */
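/* Examples of the conversions done by the switch below:

       x < 1   (signed or unsigned)  becomes  x <= 0
       x >= 1                        becomes  x > 0
       x <= -1 (signed)              becomes  x < 0
       x > -1  (signed)              becomes  x >= 0   */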
9555 switch (TREE_CODE (exp))
9557 case EQ_EXPR:
9558 code = EQ;
9559 break;
9560 case NE_EXPR:
9561 code = NE;
9562 break;
9563 case LT_EXPR:
9564 if (integer_onep (arg1))
9565 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9566 else
9567 code = unsignedp ? LTU : LT;
9568 break;
9569 case LE_EXPR:
9570 if (! unsignedp && integer_all_onesp (arg1))
9571 arg1 = integer_zero_node, code = LT;
9572 else
9573 code = unsignedp ? LEU : LE;
9574 break;
9575 case GT_EXPR:
9576 if (! unsignedp && integer_all_onesp (arg1))
9577 arg1 = integer_zero_node, code = GE;
9578 else
9579 code = unsignedp ? GTU : GT;
9580 break;
9581 case GE_EXPR:
9582 if (integer_onep (arg1))
9583 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9584 else
9585 code = unsignedp ? GEU : GE;
9586 break;
9587 default:
9588 abort ();
9591 /* Put a constant second. */
9592 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9594 tem = arg0; arg0 = arg1; arg1 = tem;
9595 code = swap_condition (code);
9598 /* If this is an equality or inequality test of a single bit, we can
9599 do this by shifting the bit being tested to the low-order bit and
9600 masking the result with the constant 1. If the condition was EQ,
9601 we xor it with 1. This does not require an scc insn and is faster
9602 than an scc insn even if we have it. */
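/* E.g. "(x & 8) != 0" becomes "(x >> 3) & 1", and for the EQ form the
   result is additionally XORed with 1; no scc instruction is needed.  */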
9604 if ((code == NE || code == EQ)
9605 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9606 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9608 tree inner = TREE_OPERAND (arg0, 0);
9609 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9610 int ops_unsignedp;
9612 /* If INNER is a right shift of a constant and it plus BITNUM does
9613 not overflow, adjust BITNUM and INNER. */
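/* E.g. for "((x >> 4) & 2) != 0" BITNUM starts as 1 and INNER as (x >> 4);
   the adjustment changes this to testing bit 5 of X itself, so the inner
   shift does not have to be expanded separately.  */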
9615 if (TREE_CODE (inner) == RSHIFT_EXPR
9616 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9617 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9618 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9619 < TYPE_PRECISION (type)))
9621 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9622 inner = TREE_OPERAND (inner, 0);
9625 /* If we are going to be able to omit the AND below, we must do our
9626 operations as unsigned. If we must use the AND, we have a choice.
9627 Normally unsigned is faster, but for some machines signed is. */
9628 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9629 #ifdef LOAD_EXTEND_OP
9630 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9631 #else
9632 : 1
9633 #endif
9634 );
9636 if (subtarget == 0 || GET_CODE (subtarget) != REG
9637 || GET_MODE (subtarget) != operand_mode
9638 || ! safe_from_p (subtarget, inner, 1))
9639 subtarget = 0;
9641 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9643 if (bitnum != 0)
9644 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9645 size_int (bitnum), subtarget, ops_unsignedp);
9647 if (GET_MODE (op0) != mode)
9648 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9650 if ((code == EQ && ! invert) || (code == NE && invert))
9651 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9652 ops_unsignedp, OPTAB_LIB_WIDEN);
9654 /* Put the AND last so it can combine with more things. */
9655 if (bitnum != TYPE_PRECISION (type) - 1)
9656 op0 = expand_and (op0, const1_rtx, subtarget);
9658 return op0;
9661 /* Now see if we are likely to be able to do this. Return if not. */
9662 if (! can_compare_p (operand_mode, ccp_store_flag))
9663 return 0;
9664 icode = setcc_gen_code[(int) code];
9665 if (icode == CODE_FOR_nothing
9666 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9668 /* We can only do this if it is one of the special cases that
9669 can be handled without an scc insn. */
9670 if ((code == LT && integer_zerop (arg1))
9671 || (! only_cheap && code == GE && integer_zerop (arg1)))
9672 ;
9673 else if (BRANCH_COST >= 0
9674 && ! only_cheap && (code == NE || code == EQ)
9675 && TREE_CODE (type) != REAL_TYPE
9676 && ((abs_optab->handlers[(int) operand_mode].insn_code
9677 != CODE_FOR_nothing)
9678 || (ffs_optab->handlers[(int) operand_mode].insn_code
9679 != CODE_FOR_nothing)))
9680 ;
9681 else
9682 return 0;
9685 preexpand_calls (exp);
9686 if (subtarget == 0 || GET_CODE (subtarget) != REG
9687 || GET_MODE (subtarget) != operand_mode
9688 || ! safe_from_p (subtarget, arg1, 1))
9689 subtarget = 0;
9691 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9692 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9694 if (target == 0)
9695 target = gen_reg_rtx (mode);
9697 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9698 because, if emit_store_flag does anything at all, it will succeed and
9699 OP0 and OP1 will not be used subsequently. */
9701 result = emit_store_flag (target, code,
9702 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9703 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9704 operand_mode, unsignedp, 1);
9706 if (result)
9708 if (invert)
9709 result = expand_binop (mode, xor_optab, result, const1_rtx,
9710 result, 0, OPTAB_LIB_WIDEN);
9711 return result;
9714 /* If this failed, we have to do this with set/compare/jump/set code. */
9715 if (GET_CODE (target) != REG
9716 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9717 target = gen_reg_rtx (GET_MODE (target));
9719 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9720 result = compare_from_rtx (op0, op1, code, unsignedp,
9721 operand_mode, NULL_RTX, 0);
9722 if (GET_CODE (result) == CONST_INT)
9723 return (((result == const0_rtx && ! invert)
9724 || (result != const0_rtx && invert))
9725 ? const0_rtx : const1_rtx);
9727 label = gen_label_rtx ();
9728 if (bcc_gen_fctn[(int) code] == 0)
9729 abort ();
9731 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9732 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9733 emit_label (label);
9735 return target;
9738 /* Generate a tablejump instruction (used for switch statements). */
9740 #ifdef HAVE_tablejump
9742 /* INDEX is the value being switched on, with the lowest value
9743 in the table already subtracted.
9744 MODE is its expected mode (needed if INDEX is constant).
9745 RANGE is the length of the jump table.
9746 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9748 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9749 index value is out of range. */
9751 void
9752 do_tablejump (index, mode, range, table_label, default_label)
9753 rtx index, range, table_label, default_label;
9754 enum machine_mode mode;
9756 register rtx temp, vector;
9758 /* Do an unsigned comparison (in the proper mode) between the index
9759 expression and the value which represents the length of the range.
9760 Since we just finished subtracting the lower bound of the range
9761 from the index expression, this comparison allows us to simultaneously
9762 check that the original index expression value is both greater than
9763 or equal to the minimum value of the range and less than or equal to
9764 the maximum value of the range. */
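/* E.g. for case values 10..25 the lower bound 10 has already been
   subtracted, so a single unsigned comparison of the adjusted index against
   RANGE rejects both original values below 10 (which wrap around to very
   large unsigned numbers) and values above 25.  */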
9766 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9767 0, default_label);
9769 /* If index is in range, it must fit in Pmode.
9770 Convert to Pmode so we can index with it. */
9771 if (mode != Pmode)
9772 index = convert_to_mode (Pmode, index, 1);
9774 /* Don't let a MEM slip through, because then INDEX that comes
9775 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9776 and break_out_memory_refs will go to work on it and mess it up. */
9777 #ifdef PIC_CASE_VECTOR_ADDRESS
9778 if (flag_pic && GET_CODE (index) != REG)
9779 index = copy_to_mode_reg (Pmode, index);
9780 #endif
9782 /* If flag_force_addr were to affect this address
9783 it could interfere with the tricky assumptions made
9784 about addresses that contain label-refs,
9785 which may be valid only very near the tablejump itself. */
9786 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9787 GET_MODE_SIZE, because this indicates how large insns are. The other
9788 uses should all be Pmode, because they are addresses. This code
9789 could fail if addresses and insns are not the same size. */
9790 index = gen_rtx_PLUS (Pmode,
9791 gen_rtx_MULT (Pmode, index,
9792 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9793 gen_rtx_LABEL_REF (Pmode, table_label));
9794 #ifdef PIC_CASE_VECTOR_ADDRESS
9795 if (flag_pic)
9796 index = PIC_CASE_VECTOR_ADDRESS (index);
9797 else
9798 #endif
9799 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9800 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9801 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9802 RTX_UNCHANGING_P (vector) = 1;
9803 convert_move (temp, vector, 0);
9805 emit_jump_insn (gen_tablejump (temp, table_label));
9807 /* If we are generating PIC code or if the table is PC-relative, the
9808 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9809 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9810 emit_barrier ();
9813 #endif /* HAVE_tablejump */