1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "tm_p.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-flags.h"
35 #include "insn-codes.h"
36 #include "insn-config.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "recog.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
46 #define CEIL(x,y) (((x) + (y) - 1) / (y))
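/* For example, CEIL (10, 4) evaluates to (10 + 3) / 4 == 3: the quotient
   rounded up rather than truncated.  */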
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
54 #ifdef PUSH_ROUNDING
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first */
58 #endif
60 #endif
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
65 #else
66 #define STACK_PUSH_CODE PRE_INC
67 #endif
68 #endif
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
73 #endif
75 /* If this is nonzero, we do not bother generating VOLATILE
76 around volatile memory references, and we are willing to
77 output indirect addresses. If cse is to follow, we reject
78 indirect addresses so a useful potential cse is generated;
79 if it is used only once, instruction combination will produce
80 the same indirect address eventually. */
81 int cse_not_expected;
83 /* Nonzero to generate code for all the subroutines within an
84 expression before generating the upper levels of the expression.
85 Nowadays this is never zero. */
86 int do_preexpand_calls = 1;
88 /* Don't check memory usage, since code is being emitted to check memory
89 usage. Used when current_function_check_memory_usage is true, to avoid
90 infinite recursion. */
91 static int in_check_memory_usage;
93 /* This structure is used by move_by_pieces to describe the move to
94 be performed. */
95 struct move_by_pieces
96 {
97 rtx to;
98 rtx to_addr;
99 int autinc_to;
100 int explicit_inc_to;
101 int to_struct;
102 int to_readonly;
103 rtx from;
104 rtx from_addr;
105 int autinc_from;
106 int explicit_inc_from;
107 int from_struct;
108 int from_readonly;
109 int len;
110 int offset;
111 int reverse;
112 };
114 /* This structure is used by clear_by_pieces to describe the clear to
115 be performed. */
117 struct clear_by_pieces
118 {
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 int to_struct;
124 int len;
125 int offset;
126 int reverse;
127 };
129 extern struct obstack permanent_obstack;
131 static rtx get_push_address PROTO ((int));
133 static rtx enqueue_insn PROTO((rtx, rtx));
134 static int move_by_pieces_ninsns PROTO((unsigned int, int));
135 static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
136 struct move_by_pieces *));
137 static void clear_by_pieces PROTO((rtx, int, int));
138 static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...),
139 enum machine_mode,
140 struct clear_by_pieces *));
141 static int is_zeros_p PROTO((tree));
142 static int mostly_zeros_p PROTO((tree));
143 static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
144 tree, tree, int, int));
145 static void store_constructor PROTO((tree, rtx, int, int));
146 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
147 enum machine_mode, int, int,
148 int, int));
149 static enum memory_use_mode
150 get_memory_usage_from_modifier PROTO((enum expand_modifier));
151 static tree save_noncopied_parts PROTO((tree, tree));
152 static tree init_noncopied_parts PROTO((tree, tree));
153 static int safe_from_p PROTO((rtx, tree, int));
154 static int fixed_type_p PROTO((tree));
155 static rtx var_rtx PROTO((tree));
156 static rtx expand_increment PROTO((tree, int, int));
157 static void preexpand_calls PROTO((tree));
158 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
159 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
160 static void do_compare_and_jump PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
161 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
170 /* If a memory-to-memory move would take MOVE_RATIO or more simple
171 move-instruction sequences, we will do a movstr or libcall instead. */
173 #ifndef MOVE_RATIO
174 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
175 #define MOVE_RATIO 2
176 #else
177 /* If we are optimizing for space (-Os), cut down the default move ratio */
178 #define MOVE_RATIO (optimize_size ? 3 : 15)
179 #endif
180 #endif
182 /* This macro is used to determine whether move_by_pieces should be called
183 to perform a structure copy. */
184 #ifndef MOVE_BY_PIECES_P
185 #define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
186 (SIZE, ALIGN) < MOVE_RATIO)
187 #endif
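/* To illustrate the effect of these defaults (the numbers here are purely
   hypothetical, not taken from any particular target): with MOVE_RATIO of 15
   and 4-byte registers, a 64-byte aligned copy would need 16 single-word
   moves, so 16 >= 15 and a movstr pattern or library call is preferred,
   while a 32-byte copy needs only 8 moves and is expanded inline by
   move_by_pieces.  */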
189 /* This array records the insn_code of insns to perform block moves. */
190 enum insn_code movstr_optab[NUM_MACHINE_MODES];
192 /* This array records the insn_code of insns to perform block clears. */
193 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
195 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
197 #ifndef SLOW_UNALIGNED_ACCESS
198 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
199 #endif
201 /* This is run once per compilation to set up which modes can be used
202 directly in memory and to initialize the block move optab. */
204 void
205 init_expr_once ()
207 rtx insn, pat;
208 enum machine_mode mode;
209 int num_clobbers;
210 rtx mem, mem1;
211 char *free_point;
213 start_sequence ();
215 /* Since we are on the permanent obstack, we must be sure we save this
216 spot AFTER we call start_sequence, since it will reuse the rtl it
217 makes. */
218 free_point = (char *) oballoc (0);
220 /* Try indexing by frame ptr and try by stack ptr.
221 It is known that on the Convex the stack ptr isn't a valid index.
222 With luck, one or the other is valid on any machine. */
223 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
224 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
226 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
227 pat = PATTERN (insn);
229 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
230 mode = (enum machine_mode) ((int) mode + 1))
232 int regno;
233 rtx reg;
235 direct_load[(int) mode] = direct_store[(int) mode] = 0;
236 PUT_MODE (mem, mode);
237 PUT_MODE (mem1, mode);
239 /* See if there is some register that can be used in this mode and
240 directly loaded or stored from memory. */
242 if (mode != VOIDmode && mode != BLKmode)
243 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
244 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
245 regno++)
247 if (! HARD_REGNO_MODE_OK (regno, mode))
248 continue;
250 reg = gen_rtx_REG (mode, regno);
252 SET_SRC (pat) = mem;
253 SET_DEST (pat) = reg;
254 if (recog (pat, insn, &num_clobbers) >= 0)
255 direct_load[(int) mode] = 1;
257 SET_SRC (pat) = mem1;
258 SET_DEST (pat) = reg;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_load[(int) mode] = 1;
262 SET_SRC (pat) = reg;
263 SET_DEST (pat) = mem;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_store[(int) mode] = 1;
267 SET_SRC (pat) = reg;
268 SET_DEST (pat) = mem1;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_store[(int) mode] = 1;
274 end_sequence ();
275 obfree (free_point);
278 /* This is run at the start of compiling a function. */
280 void
281 init_expr ()
283 current_function->expr
284 = (struct expr_status *) xmalloc (sizeof (struct expr_status));
286 pending_chain = 0;
287 pending_stack_adjust = 0;
288 inhibit_defer_pop = 0;
289 saveregs_value = 0;
290 apply_args_value = 0;
291 forced_labels = 0;
294 void
295 mark_expr_status (p)
296 struct expr_status *p;
298 if (p == NULL)
299 return;
301 ggc_mark_rtx (p->x_saveregs_value);
302 ggc_mark_rtx (p->x_apply_args_value);
303 ggc_mark_rtx (p->x_forced_labels);
306 void
307 free_expr_status (f)
308 struct function *f;
310 free (f->expr);
311 f->expr = NULL;
314 /* Small sanity check that the queue is empty at the end of a function. */
315 void
316 finish_expr_for_function ()
318 if (pending_chain)
319 abort ();
322 /* Manage the queue of increment instructions to be output
323 for POSTINCREMENT_EXPR expressions, etc. */
325 /* Queue up to increment (or change) VAR later. BODY says how:
326 BODY should be the same thing you would pass to emit_insn
327 to increment right away. It will go to emit_insn later on.
329 The value is a QUEUED expression to be used in place of VAR
330 where you want to guarantee the pre-incrementation value of VAR. */
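/* An illustrative sketch of the intended protocol (the variable names below
   are hypothetical, for exposition only):

     rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
     ...emit insns that need the pre-increment value, passing Q through
        protect_from_queue first...
     emit_queue ();

   Until emit_queue flushes the chain, the QUEUED rtx Q stands for VAR.  */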
332 static rtx
333 enqueue_insn (var, body)
334 rtx var, body;
336 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
337 body, pending_chain);
338 return pending_chain;
341 /* Use protect_from_queue to convert a QUEUED expression
342 into something that you can put immediately into an instruction.
343 If the queued incrementation has not happened yet,
344 protect_from_queue returns the variable itself.
345 If the incrementation has happened, protect_from_queue returns a temp
346 that contains a copy of the old value of the variable.
348 Any time an rtx which might possibly be a QUEUED is to be put
349 into an instruction, it must be passed through protect_from_queue first.
350 QUEUED expressions are not meaningful in instructions.
352 Do not pass a value through protect_from_queue and then hold
353 on to it for a while before putting it in an instruction!
354 If the queue is flushed in between, incorrect code will result. */
356 rtx
357 protect_from_queue (x, modify)
358 register rtx x;
359 int modify;
361 register RTX_CODE code = GET_CODE (x);
363 #if 0 /* A QUEUED can hang around after the queue is forced out. */
364 /* Shortcut for most common case. */
365 if (pending_chain == 0)
366 return x;
367 #endif
369 if (code != QUEUED)
371 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
372 use of autoincrement. Make a copy of the contents of the memory
373 location rather than a copy of the address, but not if the value is
374 of mode BLKmode. Don't modify X in place since it might be
375 shared. */
376 if (code == MEM && GET_MODE (x) != BLKmode
377 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
379 register rtx y = XEXP (x, 0);
380 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
382 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
383 MEM_COPY_ATTRIBUTES (new, x);
384 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
386 if (QUEUED_INSN (y))
388 register rtx temp = gen_reg_rtx (GET_MODE (new));
389 emit_insn_before (gen_move_insn (temp, new),
390 QUEUED_INSN (y));
391 return temp;
393 return new;
395 /* Otherwise, recursively protect the subexpressions of all
396 the kinds of rtx's that can contain a QUEUED. */
397 if (code == MEM)
399 rtx tem = protect_from_queue (XEXP (x, 0), 0);
400 if (tem != XEXP (x, 0))
402 x = copy_rtx (x);
403 XEXP (x, 0) = tem;
406 else if (code == PLUS || code == MULT)
408 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
409 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
410 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
412 x = copy_rtx (x);
413 XEXP (x, 0) = new0;
414 XEXP (x, 1) = new1;
417 return x;
419 /* If the increment has not happened, use the variable itself. */
420 if (QUEUED_INSN (x) == 0)
421 return QUEUED_VAR (x);
422 /* If the increment has happened and a pre-increment copy exists,
423 use that copy. */
424 if (QUEUED_COPY (x) != 0)
425 return QUEUED_COPY (x);
426 /* The increment has happened but we haven't set up a pre-increment copy.
427 Set one up now, and use it. */
428 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
429 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
430 QUEUED_INSN (x));
431 return QUEUED_COPY (x);
434 /* Return nonzero if X contains a QUEUED expression:
435 if it contains anything that will be altered by a queued increment.
436 We handle only combinations of MEM, PLUS, MINUS and MULT operators
437 since memory addresses generally contain only those. */
439 int
440 queued_subexp_p (x)
441 rtx x;
443 register enum rtx_code code = GET_CODE (x);
444 switch (code)
446 case QUEUED:
447 return 1;
448 case MEM:
449 return queued_subexp_p (XEXP (x, 0));
450 case MULT:
451 case PLUS:
452 case MINUS:
453 return (queued_subexp_p (XEXP (x, 0))
454 || queued_subexp_p (XEXP (x, 1)));
455 default:
456 return 0;
460 /* Perform all the pending incrementations. */
462 void
463 emit_queue ()
465 register rtx p;
466 while ((p = pending_chain))
468 rtx body = QUEUED_BODY (p);
470 if (GET_CODE (body) == SEQUENCE)
472 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
473 emit_insn (QUEUED_BODY (p));
475 else
476 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
477 pending_chain = QUEUED_NEXT (p);
481 /* Copy data from FROM to TO, where the machine modes are not the same.
482 Both modes may be integer, or both may be floating.
483 UNSIGNEDP should be nonzero if FROM is an unsigned type.
484 This causes zero-extension instead of sign-extension. */
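/* Illustrative use (the registers below are hypothetical):

     rtx wide   = gen_reg_rtx (SImode);
     rtx narrow = gen_reg_rtx (QImode);
     convert_move (wide, narrow, 1);

   zero-extends the QImode value into the SImode register; passing 0 for
   UNSIGNEDP would sign-extend instead.  Both operands must already carry
   their machine modes; only a constant FROM may have VOIDmode.  */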
486 void
487 convert_move (to, from, unsignedp)
488 register rtx to, from;
489 int unsignedp;
491 enum machine_mode to_mode = GET_MODE (to);
492 enum machine_mode from_mode = GET_MODE (from);
493 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
494 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
495 enum insn_code code;
496 rtx libcall;
498 /* rtx code for making an equivalent value. */
499 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
501 to = protect_from_queue (to, 1);
502 from = protect_from_queue (from, 0);
504 if (to_real != from_real)
505 abort ();
507 /* If FROM is a SUBREG that indicates that we have already done at least
508 the required extension, strip it. We don't handle such SUBREGs as
509 TO here. */
511 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
512 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
513 >= GET_MODE_SIZE (to_mode))
514 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
515 from = gen_lowpart (to_mode, from), from_mode = to_mode;
517 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
518 abort ();
520 if (to_mode == from_mode
521 || (from_mode == VOIDmode && CONSTANT_P (from)))
523 emit_move_insn (to, from);
524 return;
527 if (to_real)
529 rtx value;
531 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
533 /* Try converting directly if the insn is supported. */
534 if ((code = can_extend_p (to_mode, from_mode, 0))
535 != CODE_FOR_nothing)
537 emit_unop_insn (code, to, from, UNKNOWN);
538 return;
542 #ifdef HAVE_trunchfqf2
543 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
545 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
546 return;
548 #endif
549 #ifdef HAVE_trunctqfqf2
550 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
552 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
553 return;
555 #endif
556 #ifdef HAVE_truncsfqf2
557 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
559 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
560 return;
562 #endif
563 #ifdef HAVE_truncdfqf2
564 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
566 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
567 return;
569 #endif
570 #ifdef HAVE_truncxfqf2
571 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
573 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
574 return;
576 #endif
577 #ifdef HAVE_trunctfqf2
578 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
580 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
581 return;
583 #endif
585 #ifdef HAVE_trunctqfhf2
586 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
588 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
589 return;
591 #endif
592 #ifdef HAVE_truncsfhf2
593 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
595 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
596 return;
598 #endif
599 #ifdef HAVE_truncdfhf2
600 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
602 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
603 return;
605 #endif
606 #ifdef HAVE_truncxfhf2
607 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
609 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
610 return;
612 #endif
613 #ifdef HAVE_trunctfhf2
614 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
616 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
617 return;
619 #endif
621 #ifdef HAVE_truncsftqf2
622 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
624 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
625 return;
627 #endif
628 #ifdef HAVE_truncdftqf2
629 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
631 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
632 return;
634 #endif
635 #ifdef HAVE_truncxftqf2
636 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
638 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
639 return;
641 #endif
642 #ifdef HAVE_trunctftqf2
643 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
645 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
646 return;
648 #endif
650 #ifdef HAVE_truncdfsf2
651 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
653 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
654 return;
656 #endif
657 #ifdef HAVE_truncxfsf2
658 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
660 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
661 return;
663 #endif
664 #ifdef HAVE_trunctfsf2
665 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
667 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
668 return;
670 #endif
671 #ifdef HAVE_truncxfdf2
672 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
674 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
675 return;
677 #endif
678 #ifdef HAVE_trunctfdf2
679 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
681 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
682 return;
684 #endif
686 libcall = (rtx) 0;
687 switch (from_mode)
689 case SFmode:
690 switch (to_mode)
692 case DFmode:
693 libcall = extendsfdf2_libfunc;
694 break;
696 case XFmode:
697 libcall = extendsfxf2_libfunc;
698 break;
700 case TFmode:
701 libcall = extendsftf2_libfunc;
702 break;
704 default:
705 break;
707 break;
709 case DFmode:
710 switch (to_mode)
712 case SFmode:
713 libcall = truncdfsf2_libfunc;
714 break;
716 case XFmode:
717 libcall = extenddfxf2_libfunc;
718 break;
720 case TFmode:
721 libcall = extenddftf2_libfunc;
722 break;
724 default:
725 break;
727 break;
729 case XFmode:
730 switch (to_mode)
732 case SFmode:
733 libcall = truncxfsf2_libfunc;
734 break;
736 case DFmode:
737 libcall = truncxfdf2_libfunc;
738 break;
740 default:
741 break;
743 break;
745 case TFmode:
746 switch (to_mode)
748 case SFmode:
749 libcall = trunctfsf2_libfunc;
750 break;
752 case DFmode:
753 libcall = trunctfdf2_libfunc;
754 break;
756 default:
757 break;
759 break;
761 default:
762 break;
765 if (libcall == (rtx) 0)
766 /* This conversion is not implemented yet. */
767 abort ();
769 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
770 1, from, from_mode);
771 emit_move_insn (to, value);
772 return;
775 /* Now both modes are integers. */
777 /* Handle expanding beyond a word. */
778 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
779 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
781 rtx insns;
782 rtx lowpart;
783 rtx fill_value;
784 rtx lowfrom;
785 int i;
786 enum machine_mode lowpart_mode;
787 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
789 /* Try converting directly if the insn is supported. */
790 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
791 != CODE_FOR_nothing)
793 /* If FROM is a SUBREG, put it into a register. Do this
794 so that we always generate the same set of insns for
795 better cse'ing; if an intermediate assignment occurred,
796 we won't be doing the operation directly on the SUBREG. */
797 if (optimize > 0 && GET_CODE (from) == SUBREG)
798 from = force_reg (from_mode, from);
799 emit_unop_insn (code, to, from, equiv_code);
800 return;
802 /* Next, try converting via full word. */
803 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
804 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
805 != CODE_FOR_nothing))
807 if (GET_CODE (to) == REG)
808 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
809 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
810 emit_unop_insn (code, to,
811 gen_lowpart (word_mode, to), equiv_code);
812 return;
815 /* No special multiword conversion insn; do it by hand. */
816 start_sequence ();
818 /* Since we will turn this into a no conflict block, we must ensure
819 that the source does not overlap the target. */
821 if (reg_overlap_mentioned_p (to, from))
822 from = force_reg (from_mode, from);
824 /* Get a copy of FROM widened to a word, if necessary. */
825 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
826 lowpart_mode = word_mode;
827 else
828 lowpart_mode = from_mode;
830 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
832 lowpart = gen_lowpart (lowpart_mode, to);
833 emit_move_insn (lowpart, lowfrom);
835 /* Compute the value to put in each remaining word. */
836 if (unsignedp)
837 fill_value = const0_rtx;
838 else
840 #ifdef HAVE_slt
841 if (HAVE_slt
842 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
843 && STORE_FLAG_VALUE == -1)
845 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
846 lowpart_mode, 0, 0);
847 fill_value = gen_reg_rtx (word_mode);
848 emit_insn (gen_slt (fill_value));
850 else
851 #endif
853 fill_value
854 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
855 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
856 NULL_RTX, 0);
857 fill_value = convert_to_mode (word_mode, fill_value, 1);
861 /* Fill the remaining words. */
862 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
864 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
865 rtx subword = operand_subword (to, index, 1, to_mode);
867 if (subword == 0)
868 abort ();
870 if (fill_value != subword)
871 emit_move_insn (subword, fill_value);
874 insns = get_insns ();
875 end_sequence ();
877 emit_no_conflict_block (insns, to, from, NULL_RTX,
878 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
879 return;
882 /* Truncating multi-word to a word or less. */
883 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
884 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
886 if (!((GET_CODE (from) == MEM
887 && ! MEM_VOLATILE_P (from)
888 && direct_load[(int) to_mode]
889 && ! mode_dependent_address_p (XEXP (from, 0)))
890 || GET_CODE (from) == REG
891 || GET_CODE (from) == SUBREG))
892 from = force_reg (from_mode, from);
893 convert_move (to, gen_lowpart (word_mode, from), 0);
894 return;
897 /* Handle pointer conversion */ /* SPEE 900220 */
898 if (to_mode == PQImode)
900 if (from_mode != QImode)
901 from = convert_to_mode (QImode, from, unsignedp);
903 #ifdef HAVE_truncqipqi2
904 if (HAVE_truncqipqi2)
906 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
907 return;
909 #endif /* HAVE_truncqipqi2 */
910 abort ();
913 if (from_mode == PQImode)
915 if (to_mode != QImode)
917 from = convert_to_mode (QImode, from, unsignedp);
918 from_mode = QImode;
920 else
922 #ifdef HAVE_extendpqiqi2
923 if (HAVE_extendpqiqi2)
925 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
926 return;
928 #endif /* HAVE_extendpqiqi2 */
929 abort ();
933 if (to_mode == PSImode)
935 if (from_mode != SImode)
936 from = convert_to_mode (SImode, from, unsignedp);
938 #ifdef HAVE_truncsipsi2
939 if (HAVE_truncsipsi2)
941 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
942 return;
944 #endif /* HAVE_truncsipsi2 */
945 abort ();
948 if (from_mode == PSImode)
950 if (to_mode != SImode)
952 from = convert_to_mode (SImode, from, unsignedp);
953 from_mode = SImode;
955 else
957 #ifdef HAVE_extendpsisi2
958 if (HAVE_extendpsisi2)
960 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
961 return;
963 #endif /* HAVE_extendpsisi2 */
964 abort ();
968 if (to_mode == PDImode)
970 if (from_mode != DImode)
971 from = convert_to_mode (DImode, from, unsignedp);
973 #ifdef HAVE_truncdipdi2
974 if (HAVE_truncdipdi2)
976 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
977 return;
979 #endif /* HAVE_truncdipdi2 */
980 abort ();
983 if (from_mode == PDImode)
985 if (to_mode != DImode)
987 from = convert_to_mode (DImode, from, unsignedp);
988 from_mode = DImode;
990 else
992 #ifdef HAVE_extendpdidi2
993 if (HAVE_extendpdidi2)
995 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
996 return;
998 #endif /* HAVE_extendpdidi2 */
999 abort ();
1003 /* Now follow all the conversions between integers
1004 no more than a word long. */
1006 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1007 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1008 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1009 GET_MODE_BITSIZE (from_mode)))
1011 if (!((GET_CODE (from) == MEM
1012 && ! MEM_VOLATILE_P (from)
1013 && direct_load[(int) to_mode]
1014 && ! mode_dependent_address_p (XEXP (from, 0)))
1015 || GET_CODE (from) == REG
1016 || GET_CODE (from) == SUBREG))
1017 from = force_reg (from_mode, from);
1018 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1019 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1020 from = copy_to_reg (from);
1021 emit_move_insn (to, gen_lowpart (to_mode, from));
1022 return;
1025 /* Handle extension. */
1026 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1028 /* Convert directly if that works. */
1029 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1030 != CODE_FOR_nothing)
1032 emit_unop_insn (code, to, from, equiv_code);
1033 return;
1035 else
1037 enum machine_mode intermediate;
1038 rtx tmp;
1039 tree shift_amount;
1041 /* Search for a mode to convert via. */
1042 for (intermediate = from_mode; intermediate != VOIDmode;
1043 intermediate = GET_MODE_WIDER_MODE (intermediate))
1044 if (((can_extend_p (to_mode, intermediate, unsignedp)
1045 != CODE_FOR_nothing)
1046 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1047 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1048 GET_MODE_BITSIZE (intermediate))))
1049 && (can_extend_p (intermediate, from_mode, unsignedp)
1050 != CODE_FOR_nothing))
1052 convert_move (to, convert_to_mode (intermediate, from,
1053 unsignedp), unsignedp);
1054 return;
1057 /* No suitable intermediate mode.
1058 Generate what we need with shifts. */
1059 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1060 - GET_MODE_BITSIZE (from_mode), 0);
1061 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1062 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1063 to, unsignedp);
1064 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1065 to, unsignedp);
1066 if (tmp != to)
1067 emit_move_insn (to, tmp);
1068 return;
1072 /* Support special truncate insns for certain modes. */
1074 if (from_mode == DImode && to_mode == SImode)
1076 #ifdef HAVE_truncdisi2
1077 if (HAVE_truncdisi2)
1079 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1080 return;
1082 #endif
1083 convert_move (to, force_reg (from_mode, from), unsignedp);
1084 return;
1087 if (from_mode == DImode && to_mode == HImode)
1089 #ifdef HAVE_truncdihi2
1090 if (HAVE_truncdihi2)
1092 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1093 return;
1095 #endif
1096 convert_move (to, force_reg (from_mode, from), unsignedp);
1097 return;
1100 if (from_mode == DImode && to_mode == QImode)
1102 #ifdef HAVE_truncdiqi2
1103 if (HAVE_truncdiqi2)
1105 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1106 return;
1108 #endif
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1110 return;
1113 if (from_mode == SImode && to_mode == HImode)
1115 #ifdef HAVE_truncsihi2
1116 if (HAVE_truncsihi2)
1118 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1119 return;
1121 #endif
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 return;
1126 if (from_mode == SImode && to_mode == QImode)
1128 #ifdef HAVE_truncsiqi2
1129 if (HAVE_truncsiqi2)
1131 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1132 return;
1134 #endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1139 if (from_mode == HImode && to_mode == QImode)
1141 #ifdef HAVE_trunchiqi2
1142 if (HAVE_trunchiqi2)
1144 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1145 return;
1147 #endif
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 return;
1152 if (from_mode == TImode && to_mode == DImode)
1154 #ifdef HAVE_trunctidi2
1155 if (HAVE_trunctidi2)
1157 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1158 return;
1160 #endif
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 return;
1165 if (from_mode == TImode && to_mode == SImode)
1167 #ifdef HAVE_trunctisi2
1168 if (HAVE_trunctisi2)
1170 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1171 return;
1173 #endif
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 return;
1178 if (from_mode == TImode && to_mode == HImode)
1180 #ifdef HAVE_trunctihi2
1181 if (HAVE_trunctihi2)
1183 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1184 return;
1186 #endif
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 return;
1191 if (from_mode == TImode && to_mode == QImode)
1193 #ifdef HAVE_trunctiqi2
1194 if (HAVE_trunctiqi2)
1196 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1197 return;
1199 #endif
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 return;
1204 /* Handle truncation of volatile memrefs, and so on;
1205 the things that couldn't be truncated directly,
1206 and for which there was no special instruction. */
1207 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1209 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1210 emit_move_insn (to, temp);
1211 return;
1214 /* Mode combination is not recognized. */
1215 abort ();
1218 /* Return an rtx for a value that would result
1219 from converting X to mode MODE.
1220 Both X and MODE may be floating, or both integer.
1221 UNSIGNEDP is nonzero if X is an unsigned value.
1222 This can be done by referring to a part of X in place
1223 or by copying to a new temporary with conversion.
1225 This function *must not* call protect_from_queue
1226 except when putting X into an insn (in which case convert_move does it). */
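/* Note that convert_to_mode (mode, x, unsignedp) simply returns
   convert_modes (mode, VOIDmode, x, unsignedp); see below.  */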
1228 rtx
1229 convert_to_mode (mode, x, unsignedp)
1230 enum machine_mode mode;
1231 rtx x;
1232 int unsignedp;
1234 return convert_modes (mode, VOIDmode, x, unsignedp);
1237 /* Return an rtx for a value that would result
1238 from converting X from mode OLDMODE to mode MODE.
1239 Both modes may be floating, or both integer.
1240 UNSIGNEDP is nonzero if X is an unsigned value.
1242 This can be done by referring to a part of X in place
1243 or by copying to a new temporary with conversion.
1245 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1247 This function *must not* call protect_from_queue
1248 except when putting X into an insn (in which case convert_move does it). */
1250 rtx
1251 convert_modes (mode, oldmode, x, unsignedp)
1252 enum machine_mode mode, oldmode;
1253 rtx x;
1254 int unsignedp;
1256 register rtx temp;
1258 /* If FROM is a SUBREG that indicates that we have already done at least
1259 the required extension, strip it. */
1261 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1262 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1263 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1264 x = gen_lowpart (mode, x);
1266 if (GET_MODE (x) != VOIDmode)
1267 oldmode = GET_MODE (x);
1269 if (mode == oldmode)
1270 return x;
1272 /* There is one case that we must handle specially: If we are converting
1273 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1274 we are to interpret the constant as unsigned, gen_lowpart will do
1275 the wrong thing if the constant appears negative. What we want to do is
1276 make the high-order word of the constant zero, not all ones. */
1278 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1279 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1280 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1282 HOST_WIDE_INT val = INTVAL (x);
1284 if (oldmode != VOIDmode
1285 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1287 int width = GET_MODE_BITSIZE (oldmode);
1289 /* We need to zero extend VAL. */
1290 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1293 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1296 /* We can do this with a gen_lowpart if both desired and current modes
1297 are integer, and this is either a constant integer, a register, or a
1298 non-volatile MEM. Except for the constant case where MODE is no
1299 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1301 if ((GET_CODE (x) == CONST_INT
1302 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1303 || (GET_MODE_CLASS (mode) == MODE_INT
1304 && GET_MODE_CLASS (oldmode) == MODE_INT
1305 && (GET_CODE (x) == CONST_DOUBLE
1306 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1307 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1308 && direct_load[(int) mode])
1309 || (GET_CODE (x) == REG
1310 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1311 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1313 /* ?? If we don't know OLDMODE, we have to assume here that
1314 X does not need sign- or zero-extension. This may not be
1315 the case, but it's the best we can do. */
1316 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1317 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1319 HOST_WIDE_INT val = INTVAL (x);
1320 int width = GET_MODE_BITSIZE (oldmode);
1322 /* We must sign or zero-extend in this case. Start by
1323 zero-extending, then sign extend if we need to. */
1324 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1325 if (! unsignedp
1326 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1327 val |= (HOST_WIDE_INT) (-1) << width;
1329 return GEN_INT (val);
1332 return gen_lowpart (mode, x);
1335 temp = gen_reg_rtx (mode);
1336 convert_move (temp, x, unsignedp);
1337 return temp;
1341 /* This macro is used to determine the largest unit size that
1342 move_by_pieces can use. */
1344 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1345 move efficiently, as opposed to MOVE_MAX which is the maximum
1346 number of bytes we can move with a single instruction. */
1348 #ifndef MOVE_MAX_PIECES
1349 #define MOVE_MAX_PIECES MOVE_MAX
1350 #endif
1352 /* Generate several move instructions to copy LEN bytes
1353 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1354 The caller must pass FROM and TO
1355 through protect_from_queue before calling.
1356 ALIGN (in bytes) is maximum alignment we can assume. */
1358 void
1359 move_by_pieces (to, from, len, align)
1360 rtx to, from;
1361 int len, align;
1363 struct move_by_pieces data;
1364 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1365 int max_size = MOVE_MAX_PIECES + 1;
1366 enum machine_mode mode = VOIDmode, tmode;
1367 enum insn_code icode;
1369 data.offset = 0;
1370 data.to_addr = to_addr;
1371 data.from_addr = from_addr;
1372 data.to = to;
1373 data.from = from;
1374 data.autinc_to
1375 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1376 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1377 data.autinc_from
1378 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1379 || GET_CODE (from_addr) == POST_INC
1380 || GET_CODE (from_addr) == POST_DEC);
1382 data.explicit_inc_from = 0;
1383 data.explicit_inc_to = 0;
1384 data.reverse
1385 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1386 if (data.reverse) data.offset = len;
1387 data.len = len;
1389 data.to_struct = MEM_IN_STRUCT_P (to);
1390 data.from_struct = MEM_IN_STRUCT_P (from);
1391 data.to_readonly = RTX_UNCHANGING_P (to);
1392 data.from_readonly = RTX_UNCHANGING_P (from);
1394 /* If copying requires more than two move insns,
1395 copy addresses to registers (to make displacements shorter)
1396 and use post-increment if available. */
1397 if (!(data.autinc_from && data.autinc_to)
1398 && move_by_pieces_ninsns (len, align) > 2)
1400 /* Find the mode of the largest move... */
1401 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1402 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1403 if (GET_MODE_SIZE (tmode) < max_size)
1404 mode = tmode;
1406 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1408 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1409 data.autinc_from = 1;
1410 data.explicit_inc_from = -1;
1412 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1414 data.from_addr = copy_addr_to_reg (from_addr);
1415 data.autinc_from = 1;
1416 data.explicit_inc_from = 1;
1418 if (!data.autinc_from && CONSTANT_P (from_addr))
1419 data.from_addr = copy_addr_to_reg (from_addr);
1420 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1422 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1423 data.autinc_to = 1;
1424 data.explicit_inc_to = -1;
1426 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1428 data.to_addr = copy_addr_to_reg (to_addr);
1429 data.autinc_to = 1;
1430 data.explicit_inc_to = 1;
1432 if (!data.autinc_to && CONSTANT_P (to_addr))
1433 data.to_addr = copy_addr_to_reg (to_addr);
1436 if (! SLOW_UNALIGNED_ACCESS
1437 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1438 align = MOVE_MAX;
1440 /* First move what we can in the largest integer mode, then go to
1441 successively smaller modes. */
1443 while (max_size > 1)
1445 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1446 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1447 if (GET_MODE_SIZE (tmode) < max_size)
1448 mode = tmode;
1450 if (mode == VOIDmode)
1451 break;
1453 icode = mov_optab->handlers[(int) mode].insn_code;
1454 if (icode != CODE_FOR_nothing
1455 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1456 GET_MODE_SIZE (mode)))
1457 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1459 max_size = GET_MODE_SIZE (mode);
1462 /* The code above should have handled everything. */
1463 if (data.len > 0)
1464 abort ();
1467 /* Return number of insns required to move L bytes by pieces.
1468 ALIGN (in bytes) is maximum alignment we can assume. */
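/* For instance, on a hypothetical 32-bit target with QImode, HImode and
   SImode moves available, move_by_pieces_ninsns (10, 4) would count 3
   insns: two SImode moves for the first 8 bytes and one HImode move for
   the remaining 2.  */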
1470 static int
1471 move_by_pieces_ninsns (l, align)
1472 unsigned int l;
1473 int align;
1475 register int n_insns = 0;
1476 int max_size = MOVE_MAX + 1;
1478 if (! SLOW_UNALIGNED_ACCESS
1479 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1480 align = MOVE_MAX;
1482 while (max_size > 1)
1484 enum machine_mode mode = VOIDmode, tmode;
1485 enum insn_code icode;
1487 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1488 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1489 if (GET_MODE_SIZE (tmode) < max_size)
1490 mode = tmode;
1492 if (mode == VOIDmode)
1493 break;
1495 icode = mov_optab->handlers[(int) mode].insn_code;
1496 if (icode != CODE_FOR_nothing
1497 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1498 GET_MODE_SIZE (mode)))
1499 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1501 max_size = GET_MODE_SIZE (mode);
1504 return n_insns;
1507 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1508 with move instructions for mode MODE. GENFUN is the gen_... function
1509 to make a move insn for that mode. DATA has all the other info. */
1511 static void
1512 move_by_pieces_1 (genfun, mode, data)
1513 rtx (*genfun) PROTO ((rtx, ...));
1514 enum machine_mode mode;
1515 struct move_by_pieces *data;
1517 register int size = GET_MODE_SIZE (mode);
1518 register rtx to1, from1;
1520 while (data->len >= size)
1522 if (data->reverse) data->offset -= size;
1524 to1 = (data->autinc_to
1525 ? gen_rtx_MEM (mode, data->to_addr)
1526 : copy_rtx (change_address (data->to, mode,
1527 plus_constant (data->to_addr,
1528 data->offset))));
1529 MEM_IN_STRUCT_P (to1) = data->to_struct;
1530 RTX_UNCHANGING_P (to1) = data->to_readonly;
1532 from1
1533 = (data->autinc_from
1534 ? gen_rtx_MEM (mode, data->from_addr)
1535 : copy_rtx (change_address (data->from, mode,
1536 plus_constant (data->from_addr,
1537 data->offset))));
1538 MEM_IN_STRUCT_P (from1) = data->from_struct;
1539 RTX_UNCHANGING_P (from1) = data->from_readonly;
1541 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1542 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1543 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1544 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1546 emit_insn ((*genfun) (to1, from1));
1547 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1548 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1549 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1550 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1552 if (! data->reverse) data->offset += size;
1554 data->len -= size;
1558 /* Emit code to move a block Y to a block X.
1559 This may be done with string-move instructions,
1560 with multiple scalar move instructions, or with a library call.
1562 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1563 with mode BLKmode.
1564 SIZE is an rtx that says how long they are.
1565 ALIGN is the maximum alignment we can assume they have,
1566 measured in bytes.
1568 Return the address of the new block, if memcpy is called and returns it,
1569 0 otherwise. */
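/* A typical call might look like (operands purely illustrative):

     emit_block_move (dst_mem, src_mem, GEN_INT (32), 4);

   which copies 32 bytes assuming at most 4-byte alignment, choosing between
   move_by_pieces, a movstr pattern, and a memcpy/bcopy call.  */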
1571 rtx
1572 emit_block_move (x, y, size, align)
1573 rtx x, y;
1574 rtx size;
1575 int align;
1577 rtx retval = 0;
1578 #ifdef TARGET_MEM_FUNCTIONS
1579 static tree fn;
1580 tree call_expr, arg_list;
1581 #endif
1583 if (GET_MODE (x) != BLKmode)
1584 abort ();
1586 if (GET_MODE (y) != BLKmode)
1587 abort ();
1589 x = protect_from_queue (x, 1);
1590 y = protect_from_queue (y, 0);
1591 size = protect_from_queue (size, 0);
1593 if (GET_CODE (x) != MEM)
1594 abort ();
1595 if (GET_CODE (y) != MEM)
1596 abort ();
1597 if (size == 0)
1598 abort ();
1600 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1601 move_by_pieces (x, y, INTVAL (size), align);
1602 else
1604 /* Try the most limited insn first, because there's no point
1605 including more than one in the machine description unless
1606 the more limited one has some advantage. */
1608 rtx opalign = GEN_INT (align);
1609 enum machine_mode mode;
1611 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1612 mode = GET_MODE_WIDER_MODE (mode))
1614 enum insn_code code = movstr_optab[(int) mode];
1615 insn_operand_predicate_fn pred;
1617 if (code != CODE_FOR_nothing
1618 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1619 here because if SIZE is less than the mode mask, as it is
1620 returned by the macro, it will definitely be less than the
1621 actual mode mask. */
1622 && ((GET_CODE (size) == CONST_INT
1623 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1624 <= (GET_MODE_MASK (mode) >> 1)))
1625 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1626 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1627 || (*pred) (x, BLKmode))
1628 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1629 || (*pred) (y, BLKmode))
1630 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1631 || (*pred) (opalign, VOIDmode)))
1633 rtx op2;
1634 rtx last = get_last_insn ();
1635 rtx pat;
1637 op2 = convert_to_mode (mode, size, 1);
1638 pred = insn_data[(int) code].operand[2].predicate;
1639 if (pred != 0 && ! (*pred) (op2, mode))
1640 op2 = copy_to_mode_reg (mode, op2);
1642 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1643 if (pat)
1645 emit_insn (pat);
1646 return 0;
1648 else
1649 delete_insns_since (last);
1653 /* X, Y, or SIZE may have been passed through protect_from_queue.
1655 It is unsafe to save the value generated by protect_from_queue
1656 and reuse it later. Consider what happens if emit_queue is
1657 called before the return value from protect_from_queue is used.
1659 Expansion of the CALL_EXPR below will call emit_queue before
1660 we are finished emitting RTL for argument setup. So if we are
1661 not careful we could get the wrong value for an argument.
1663 To avoid this problem we go ahead and emit code to copy X, Y &
1664 SIZE into new pseudos. We can then place those new pseudos
1665 into an RTL_EXPR and use them later, even after a call to
1666 emit_queue.
1668 Note this is not strictly needed for library calls since they
1669 do not call emit_queue before loading their arguments. However,
1670 we may need to have library calls call emit_queue in the future
1671 since failing to do so could cause problems for targets which
1672 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1673 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1674 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1676 #ifdef TARGET_MEM_FUNCTIONS
1677 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1678 #else
1679 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1680 TREE_UNSIGNED (integer_type_node));
1681 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1682 #endif
1684 #ifdef TARGET_MEM_FUNCTIONS
1685 /* It is incorrect to use the libcall calling conventions to call
1686 memcpy in this context.
1688 This could be a user call to memcpy and the user may wish to
1689 examine the return value from memcpy.
1691 For targets where libcalls and normal calls have different conventions
1692 for returning pointers, we could end up generating incorrect code.
1694 So instead of using a libcall sequence we build up a suitable
1695 CALL_EXPR and expand the call in the normal fashion. */
1696 if (fn == NULL_TREE)
1698 tree fntype;
1700 /* This was copied from except.c, I don't know if all this is
1701 necessary in this context or not. */
1702 fn = get_identifier ("memcpy");
1703 push_obstacks_nochange ();
1704 end_temporary_allocation ();
1705 fntype = build_pointer_type (void_type_node);
1706 fntype = build_function_type (fntype, NULL_TREE);
1707 fn = build_decl (FUNCTION_DECL, fn, fntype);
1708 ggc_add_tree_root (&fn, 1);
1709 DECL_EXTERNAL (fn) = 1;
1710 TREE_PUBLIC (fn) = 1;
1711 DECL_ARTIFICIAL (fn) = 1;
1712 make_decl_rtl (fn, NULL_PTR, 1);
1713 assemble_external (fn);
1714 pop_obstacks ();
1717 /* We need to make an argument list for the function call.
1719 memcpy has three arguments: the first two are void * addresses and
1720 the last is a size_t byte count for the copy. */
1721 arg_list
1722 = build_tree_list (NULL_TREE,
1723 make_tree (build_pointer_type (void_type_node), x));
1724 TREE_CHAIN (arg_list)
1725 = build_tree_list (NULL_TREE,
1726 make_tree (build_pointer_type (void_type_node), y));
1727 TREE_CHAIN (TREE_CHAIN (arg_list))
1728 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1729 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1731 /* Now we have to build up the CALL_EXPR itself. */
1732 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1733 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1734 call_expr, arg_list, NULL_TREE);
1735 TREE_SIDE_EFFECTS (call_expr) = 1;
1737 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1738 #else
1739 emit_library_call (bcopy_libfunc, 0,
1740 VOIDmode, 3, y, Pmode, x, Pmode,
1741 convert_to_mode (TYPE_MODE (integer_type_node), size,
1742 TREE_UNSIGNED (integer_type_node)),
1743 TYPE_MODE (integer_type_node));
1744 #endif
1747 return retval;
1750 /* Copy all or part of a value X into registers starting at REGNO.
1751 The number of registers to be filled is NREGS. */
1753 void
1754 move_block_to_reg (regno, x, nregs, mode)
1755 int regno;
1756 rtx x;
1757 int nregs;
1758 enum machine_mode mode;
1760 int i;
1761 #ifdef HAVE_load_multiple
1762 rtx pat;
1763 rtx last;
1764 #endif
1766 if (nregs == 0)
1767 return;
1769 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1770 x = validize_mem (force_const_mem (mode, x));
1772 /* See if the machine can do this with a load multiple insn. */
1773 #ifdef HAVE_load_multiple
1774 if (HAVE_load_multiple)
1776 last = get_last_insn ();
1777 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1778 GEN_INT (nregs));
1779 if (pat)
1781 emit_insn (pat);
1782 return;
1784 else
1785 delete_insns_since (last);
1787 #endif
1789 for (i = 0; i < nregs; i++)
1790 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1791 operand_subword_force (x, i, mode));
1794 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1795 The number of registers to be filled is NREGS. SIZE indicates the number
1796 of bytes in the object X. */
1799 void
1800 move_block_from_reg (regno, x, nregs, size)
1801 int regno;
1802 rtx x;
1803 int nregs;
1804 int size;
1806 int i;
1807 #ifdef HAVE_store_multiple
1808 rtx pat;
1809 rtx last;
1810 #endif
1811 enum machine_mode mode;
1813 /* If SIZE is that of a mode no bigger than a word, just use that
1814 mode's store operation. */
1815 if (size <= UNITS_PER_WORD
1816 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1818 emit_move_insn (change_address (x, mode, NULL),
1819 gen_rtx_REG (mode, regno));
1820 return;
1823 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1824 to the left before storing to memory. Note that the previous test
1825 doesn't handle all cases (e.g. SIZE == 3). */
1826 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1828 rtx tem = operand_subword (x, 0, 1, BLKmode);
1829 rtx shift;
1831 if (tem == 0)
1832 abort ();
1834 shift = expand_shift (LSHIFT_EXPR, word_mode,
1835 gen_rtx_REG (word_mode, regno),
1836 build_int_2 ((UNITS_PER_WORD - size)
1837 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1838 emit_move_insn (tem, shift);
1839 return;
1842 /* See if the machine can do this with a store multiple insn. */
1843 #ifdef HAVE_store_multiple
1844 if (HAVE_store_multiple)
1846 last = get_last_insn ();
1847 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1848 GEN_INT (nregs));
1849 if (pat)
1851 emit_insn (pat);
1852 return;
1854 else
1855 delete_insns_since (last);
1857 #endif
1859 for (i = 0; i < nregs; i++)
1861 rtx tem = operand_subword (x, i, 1, BLKmode);
1863 if (tem == 0)
1864 abort ();
1866 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1870 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1871 registers represented by a PARALLEL. SSIZE represents the total size of
1872 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1873 SRC in bits. */
1874 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1875 the balance will be in what would be the low-order memory addresses, i.e.
1876 left justified for big endian, right justified for little endian. This
1877 happens to be true for the targets currently using this support. If this
1878 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1879 would be needed. */
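/* As a rough illustration (register numbers and modes hypothetical), DST
   might look like

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   where each element pairs a destination register with its byte offset
   within SRC.  */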
1881 void
1882 emit_group_load (dst, orig_src, ssize, align)
1883 rtx dst, orig_src;
1884 int align, ssize;
1886 rtx *tmps, src;
1887 int start, i;
1889 if (GET_CODE (dst) != PARALLEL)
1890 abort ();
1892 /* Check for a NULL entry, used to indicate that the parameter goes
1893 both on the stack and in registers. */
1894 if (XEXP (XVECEXP (dst, 0, 0), 0))
1895 start = 0;
1896 else
1897 start = 1;
1899 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1901 /* If we won't be loading directly from memory, protect the real source
1902 from strange tricks we might play. */
1903 src = orig_src;
1904 if (GET_CODE (src) != MEM)
1906 src = gen_reg_rtx (GET_MODE (orig_src));
1907 emit_move_insn (src, orig_src);
1910 /* Process the pieces. */
1911 for (i = start; i < XVECLEN (dst, 0); i++)
1913 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1914 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1915 int bytelen = GET_MODE_SIZE (mode);
1916 int shift = 0;
1918 /* Handle trailing fragments that run over the size of the struct. */
1919 if (ssize >= 0 && bytepos + bytelen > ssize)
1921 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1922 bytelen = ssize - bytepos;
1923 if (bytelen <= 0)
1924 abort();
1927 /* Optimize the access just a bit. */
1928 if (GET_CODE (src) == MEM
1929 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1930 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1931 && bytelen == GET_MODE_SIZE (mode))
1933 tmps[i] = gen_reg_rtx (mode);
1934 emit_move_insn (tmps[i],
1935 change_address (src, mode,
1936 plus_constant (XEXP (src, 0),
1937 bytepos)));
1939 else if (GET_CODE (src) == CONCAT)
1941 if (bytepos == 0
1942 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1943 tmps[i] = XEXP (src, 0);
1944 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1945 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1946 tmps[i] = XEXP (src, 1);
1947 else
1948 abort ();
1950 else
1952 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1953 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1954 mode, mode, align, ssize);
1957 if (BYTES_BIG_ENDIAN && shift)
1959 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1960 tmps[i], 0, OPTAB_WIDEN);
1963 emit_queue();
1965 /* Copy the extracted pieces into the proper (probable) hard regs. */
1966 for (i = start; i < XVECLEN (dst, 0); i++)
1967 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1970 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1971 registers represented by a PARALLEL. SSIZE represents the total size of
1972 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1974 void
1975 emit_group_store (orig_dst, src, ssize, align)
1976 rtx orig_dst, src;
1977 int ssize, align;
1979 rtx *tmps, dst;
1980 int start, i;
1982 if (GET_CODE (src) != PARALLEL)
1983 abort ();
1985 /* Check for a NULL entry, used to indicate that the parameter goes
1986 both on the stack and in registers. */
1987 if (XEXP (XVECEXP (src, 0, 0), 0))
1988 start = 0;
1989 else
1990 start = 1;
1992 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
1994 /* Copy the (probable) hard regs into pseudos. */
1995 for (i = start; i < XVECLEN (src, 0); i++)
1997 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1998 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1999 emit_move_insn (tmps[i], reg);
2001 emit_queue();
2003 /* If we won't be storing directly into memory, protect the real destination
2004 from strange tricks we might play. */
2005 dst = orig_dst;
2006 if (GET_CODE (dst) == PARALLEL)
2008 rtx temp;
2010 /* We can get a PARALLEL dst if there is a conditional expression in
2011 a return statement. In that case, the dst and src are the same,
2012 so no action is necessary. */
2013 if (rtx_equal_p (dst, src))
2014 return;
2016 /* It is unclear if we can ever reach here, but we may as well handle
2017 it. Allocate a temporary, and split this into a store/load to/from
2018 the temporary. */
2020 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2021 emit_group_store (temp, src, ssize, align);
2022 emit_group_load (dst, temp, ssize, align);
2023 return;
2025 else if (GET_CODE (dst) != MEM)
2027 dst = gen_reg_rtx (GET_MODE (orig_dst));
2028 /* Make life a bit easier for combine. */
2029 emit_move_insn (dst, const0_rtx);
2031 else if (! MEM_IN_STRUCT_P (dst))
2033 /* store_bit_field requires that memory operations have
2034 mem_in_struct_p set; we might not. */
2036 dst = copy_rtx (orig_dst);
2037 MEM_SET_IN_STRUCT_P (dst, 1);
2040 /* Process the pieces. */
2041 for (i = start; i < XVECLEN (src, 0); i++)
2043 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2044 enum machine_mode mode = GET_MODE (tmps[i]);
2045 int bytelen = GET_MODE_SIZE (mode);
2047 /* Handle trailing fragments that run over the size of the struct. */
2048 if (ssize >= 0 && bytepos + bytelen > ssize)
2050 if (BYTES_BIG_ENDIAN)
2052 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2053 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2054 tmps[i], 0, OPTAB_WIDEN);
2056 bytelen = ssize - bytepos;
2059 /* Optimize the access just a bit. */
2060 if (GET_CODE (dst) == MEM
2061 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2062 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2063 && bytelen == GET_MODE_SIZE (mode))
2065 emit_move_insn (change_address (dst, mode,
2066 plus_constant (XEXP (dst, 0),
2067 bytepos)),
2068 tmps[i]);
2070 else
2072 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2073 mode, tmps[i], align, ssize);
2076 emit_queue();
2078 /* Copy from the pseudo into the (probable) hard reg. */
2079 if (GET_CODE (dst) == REG)
2080 emit_move_insn (orig_dst, dst);
2083 /* Generate code to copy a BLKmode object of TYPE out of a
2084 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2085 is null, a stack temporary is created. TGTBLK is returned.
2087 The primary purpose of this routine is to handle functions
2088 that return BLKmode structures in registers. Some machines
2089 (the PA for example) want to return all small structures
2090 in registers regardless of the structure's alignment.
2094 copy_blkmode_from_reg(tgtblk,srcreg,type)
2095 rtx tgtblk;
2096 rtx srcreg;
2097 tree type;
2099 int bytes = int_size_in_bytes (type);
2100 rtx src = NULL, dst = NULL;
2101 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2102 int bitpos, xbitpos, big_endian_correction = 0;
2104 if (tgtblk == 0)
2106 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2107 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2108 preserve_temp_slots (tgtblk);
2111 /* This code assumes srcreg is at least a full word. If it isn't,
2112 copy it into a new pseudo which is a full word. */
2113 if (GET_MODE (srcreg) != BLKmode
2114 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2115 srcreg = convert_to_mode (word_mode, srcreg,
2116 TREE_UNSIGNED (type));
2118 /* Structures whose size is not a multiple of a word are aligned
2119 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2120 machine, this means we must skip the empty high order bytes when
2121 calculating the bit offset. */
2122 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2123 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2124 * BITS_PER_UNIT));
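/* A worked example under assumed target parameters: with UNITS_PER_WORD
   == 4 and BITS_PER_UNIT == 8, a 6-byte structure gives
   big_endian_correction = 32 - (6 % 4) * 8 = 16, so the source extraction
   position (xbitpos) starts 16 bits in, skipping the empty high-order
   bytes.  */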
2126 /* Copy the structure BITSIZE bits at a time.
2128 We could probably emit more efficient code for machines
2129 which do not use strict alignment, but it doesn't seem
2130 worth the effort at the current time. */
2131 for (bitpos = 0, xbitpos = big_endian_correction;
2132 bitpos < bytes * BITS_PER_UNIT;
2133 bitpos += bitsize, xbitpos += bitsize)
2136 /* We need a new source operand each time xbitpos is on a
2137 word boundary and when xbitpos == big_endian_correction
2138 (the first time through). */
2139 if (xbitpos % BITS_PER_WORD == 0
2140 || xbitpos == big_endian_correction)
2141 src = operand_subword_force (srcreg,
2142 xbitpos / BITS_PER_WORD,
2143 BLKmode);
2145 /* We need a new destination operand each time bitpos is on
2146 a word boundary. */
2147 if (bitpos % BITS_PER_WORD == 0)
2148 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2150 /* Use xbitpos for the source extraction (right justified) and
2151 bitpos for the destination store (left justified). */
2152 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2153 extract_bit_field (src, bitsize,
2154 xbitpos % BITS_PER_WORD, 1,
2155 NULL_RTX, word_mode,
2156 word_mode,
2157 bitsize / BITS_PER_UNIT,
2158 BITS_PER_WORD),
2159 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2161 return tgtblk;
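/* A minimal usage sketch, with hypothetical caller-side names; passing
   NULL_RTX for TGTBLK relies on the stack temporary created above:

       value = copy_blkmode_from_reg (NULL_RTX, valreg, TREE_TYPE (exp));  */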
2165 /* Add a USE expression for REG to the (possibly empty) list pointed
2166 to by CALL_FUSAGE. REG must denote a hard register. */
2168 void
2169 use_reg (call_fusage, reg)
2170 rtx *call_fusage, reg;
2172 if (GET_CODE (reg) != REG
2173 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2174 abort();
2176 *call_fusage
2177 = gen_rtx_EXPR_LIST (VOIDmode,
2178 gen_rtx_USE (VOIDmode, reg), *call_fusage);
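/* For illustration: after two calls naming hard registers 4 and 5 (the
   modes are examples only), the list built here would look roughly like

       (expr_list (use (reg:SI 5))
                  (expr_list (use (reg:SI 4))
                             (nil)))

   with the most recently added USE at the head.  */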
2181 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2182 starting at REGNO. All of these registers must be hard registers. */
2184 void
2185 use_regs (call_fusage, regno, nregs)
2186 rtx *call_fusage;
2187 int regno;
2188 int nregs;
2190 int i;
2192 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2193 abort ();
2195 for (i = 0; i < nregs; i++)
2196 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2199 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2200 PARALLEL REGS. This is for calls that pass values in multiple
2201 non-contiguous locations. The Irix 6 ABI has examples of this. */
2203 void
2204 use_group_regs (call_fusage, regs)
2205 rtx *call_fusage;
2206 rtx regs;
2208 int i;
2210 for (i = 0; i < XVECLEN (regs, 0); i++)
2212 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2214 /* A NULL entry means the parameter goes both on the stack and in
2215 registers. This can also be a MEM for targets that pass values
2216 partially on the stack and partially in registers. */
2217 if (reg != 0 && GET_CODE (reg) == REG)
2218 use_reg (call_fusage, reg);
2222 /* Generate several move instructions to clear LEN bytes of block TO.
2223 (A MEM rtx with BLKmode). The caller must pass TO through
2224 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
2225 we can assume. */
2227 static void
2228 clear_by_pieces (to, len, align)
2229 rtx to;
2230 int len, align;
2232 struct clear_by_pieces data;
2233 rtx to_addr = XEXP (to, 0);
2234 int max_size = MOVE_MAX_PIECES + 1;
2235 enum machine_mode mode = VOIDmode, tmode;
2236 enum insn_code icode;
2238 data.offset = 0;
2239 data.to_addr = to_addr;
2240 data.to = to;
2241 data.autinc_to
2242 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2243 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2245 data.explicit_inc_to = 0;
2246 data.reverse
2247 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2248 if (data.reverse) data.offset = len;
2249 data.len = len;
2251 data.to_struct = MEM_IN_STRUCT_P (to);
2253 /* If copying requires more than two move insns,
2254 copy addresses to registers (to make displacements shorter)
2255 and use post-increment if available. */
2256 if (!data.autinc_to
2257 && move_by_pieces_ninsns (len, align) > 2)
2259 /* Determine the main mode we'll be using */
2260 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2261 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2262 if (GET_MODE_SIZE (tmode) < max_size)
2263 mode = tmode;
2265 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2267 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2268 data.autinc_to = 1;
2269 data.explicit_inc_to = -1;
2271 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2273 data.to_addr = copy_addr_to_reg (to_addr);
2274 data.autinc_to = 1;
2275 data.explicit_inc_to = 1;
2277 if (!data.autinc_to && CONSTANT_P (to_addr))
2278 data.to_addr = copy_addr_to_reg (to_addr);
2281 if (! SLOW_UNALIGNED_ACCESS
2282 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2283 align = MOVE_MAX;
2285 /* First move what we can in the largest integer mode, then go to
2286 successively smaller modes. */
2288 while (max_size > 1)
2290 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2291 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2292 if (GET_MODE_SIZE (tmode) < max_size)
2293 mode = tmode;
2295 if (mode == VOIDmode)
2296 break;
2298 icode = mov_optab->handlers[(int) mode].insn_code;
2299 if (icode != CODE_FOR_nothing
2300 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2301 GET_MODE_SIZE (mode)))
2302 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2304 max_size = GET_MODE_SIZE (mode);
2307 /* The code above should have handled everything. */
2308 if (data.len != 0)
2309 abort ();
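/* A hedged example of the mode walk above: with MOVE_MAX_PIECES == 8,
   LEN == 13, and DImode, SImode and QImode move patterns available with
   adequate alignment, the loop would typically emit one 8-byte, one
   4-byte and one 1-byte clear, leaving data.len == 0.  */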
2312 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2313 with move instructions for mode MODE. GENFUN is the gen_... function
2314 to make a move insn for that mode. DATA has all the other info. */
2316 static void
2317 clear_by_pieces_1 (genfun, mode, data)
2318 rtx (*genfun) PROTO ((rtx, ...));
2319 enum machine_mode mode;
2320 struct clear_by_pieces *data;
2322 register int size = GET_MODE_SIZE (mode);
2323 register rtx to1;
2325 while (data->len >= size)
2327 if (data->reverse) data->offset -= size;
2329 to1 = (data->autinc_to
2330 ? gen_rtx_MEM (mode, data->to_addr)
2331 : copy_rtx (change_address (data->to, mode,
2332 plus_constant (data->to_addr,
2333 data->offset))));
2334 MEM_IN_STRUCT_P (to1) = data->to_struct;
2336 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2337 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2339 emit_insn ((*genfun) (to1, const0_rtx));
2340 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2341 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2343 if (! data->reverse) data->offset += size;
2345 data->len -= size;
2349 /* Write zeros through the storage of OBJECT.
2350 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2351 the maximum alignment we can assume it has, measured in bytes.
2353 If we call a function that returns the length of the block, return it. */
2356 clear_storage (object, size, align)
2357 rtx object;
2358 rtx size;
2359 int align;
2361 #ifdef TARGET_MEM_FUNCTIONS
2362 static tree fn;
2363 tree call_expr, arg_list;
2364 #endif
2365 rtx retval = 0;
2367 if (GET_MODE (object) == BLKmode)
2369 object = protect_from_queue (object, 1);
2370 size = protect_from_queue (size, 0);
2372 if (GET_CODE (size) == CONST_INT
2373 && MOVE_BY_PIECES_P (INTVAL (size), align))
2374 clear_by_pieces (object, INTVAL (size), align);
2376 else
2378 /* Try the most limited insn first, because there's no point
2379 including more than one in the machine description unless
2380 the more limited one has some advantage. */
2382 rtx opalign = GEN_INT (align);
2383 enum machine_mode mode;
2385 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2386 mode = GET_MODE_WIDER_MODE (mode))
2388 enum insn_code code = clrstr_optab[(int) mode];
2389 insn_operand_predicate_fn pred;
2391 if (code != CODE_FOR_nothing
2392 /* We don't need MODE to be narrower than
2393 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2394 the mode mask, as it is returned by the macro, it will
2395 definitely be less than the actual mode mask. */
2396 && ((GET_CODE (size) == CONST_INT
2397 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2398 <= (GET_MODE_MASK (mode) >> 1)))
2399 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2400 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2401 || (*pred) (object, BLKmode))
2402 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2403 || (*pred) (opalign, VOIDmode)))
2405 rtx op1;
2406 rtx last = get_last_insn ();
2407 rtx pat;
2409 op1 = convert_to_mode (mode, size, 1);
2410 pred = insn_data[(int) code].operand[1].predicate;
2411 if (pred != 0 && ! (*pred) (op1, mode))
2412 op1 = copy_to_mode_reg (mode, op1);
2414 pat = GEN_FCN ((int) code) (object, op1, opalign);
2415 if (pat)
2417 emit_insn (pat);
2418 return 0;
2420 else
2421 delete_insns_since (last);
2425 /* OBJECT or SIZE may have been passed through protect_from_queue.
2427 It is unsafe to save the value generated by protect_from_queue
2428 and reuse it later. Consider what happens if emit_queue is
2429 called before the return value from protect_from_queue is used.
2431 Expansion of the CALL_EXPR below will call emit_queue before
2432 we are finished emitting RTL for argument setup. So if we are
2433 not careful we could get the wrong value for an argument.
2435 To avoid this problem we go ahead and emit code to copy OBJECT
2436 and SIZE into new pseudos. We can then place those new pseudos
2437 into an RTL_EXPR and use them later, even after a call to
2438 emit_queue.
2440 Note this is not strictly needed for library calls since they
2441 do not call emit_queue before loading their arguments. However,
2442 we may need to have library calls call emit_queue in the future
2443 since failing to do so could cause problems for targets which
2444 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2445 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2447 #ifdef TARGET_MEM_FUNCTIONS
2448 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2449 #else
2450 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2451 TREE_UNSIGNED (integer_type_node));
2452 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2453 #endif
2456 #ifdef TARGET_MEM_FUNCTIONS
2457 /* It is incorrect to use the libcall calling conventions to call
2458 memset in this context.
2460 This could be a user call to memset and the user may wish to
2461 examine the return value from memset.
2463 For targets where libcalls and normal calls have different
2464 conventions for returning pointers, we could end up generating
2465 incorrect code.
2467 So instead of using a libcall sequence we build up a suitable
2468 CALL_EXPR and expand the call in the normal fashion. */
2469 if (fn == NULL_TREE)
2471 tree fntype;
2473 /* This was copied from except.c; I don't know if all this is
2474 necessary in this context or not. */
2475 fn = get_identifier ("memset");
2476 push_obstacks_nochange ();
2477 end_temporary_allocation ();
2478 fntype = build_pointer_type (void_type_node);
2479 fntype = build_function_type (fntype, NULL_TREE);
2480 fn = build_decl (FUNCTION_DECL, fn, fntype);
2481 ggc_add_tree_root (&fn, 1);
2482 DECL_EXTERNAL (fn) = 1;
2483 TREE_PUBLIC (fn) = 1;
2484 DECL_ARTIFICIAL (fn) = 1;
2485 make_decl_rtl (fn, NULL_PTR, 1);
2486 assemble_external (fn);
2487 pop_obstacks ();
2490 /* We need to make an argument list for the function call.
2492 memset has three arguments, the first is a void * address, the
2493 second an integer with the initialization value, the last is a
2494 size_t byte count for the copy. */
2495 arg_list
2496 = build_tree_list (NULL_TREE,
2497 make_tree (build_pointer_type (void_type_node),
2498 object));
2499 TREE_CHAIN (arg_list)
2500 = build_tree_list (NULL_TREE,
2501 make_tree (integer_type_node, const0_rtx));
2502 TREE_CHAIN (TREE_CHAIN (arg_list))
2503 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2504 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
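/* The argument list built above corresponds, roughly, to the C-level call
   memset (object, 0, size); expanding it as an ordinary CALL_EXPR keeps
   the normal (non-libcall) return-value conventions, as explained
   above.  */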
2506 /* Now we have to build up the CALL_EXPR itself. */
2507 call_expr = build1 (ADDR_EXPR,
2508 build_pointer_type (TREE_TYPE (fn)), fn);
2509 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2510 call_expr, arg_list, NULL_TREE);
2511 TREE_SIDE_EFFECTS (call_expr) = 1;
2513 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2514 #else
2515 emit_library_call (bzero_libfunc, 0,
2516 VOIDmode, 2, object, Pmode, size,
2517 TYPE_MODE (integer_type_node));
2518 #endif
2521 else
2522 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2524 return retval;
2527 /* Generate code to copy Y into X.
2528 Both Y and X must have the same mode, except that
2529 Y can be a constant with VOIDmode.
2530 This mode cannot be BLKmode; use emit_block_move for that.
2532 Return the last instruction emitted. */
2535 emit_move_insn (x, y)
2536 rtx x, y;
2538 enum machine_mode mode = GET_MODE (x);
2540 x = protect_from_queue (x, 1);
2541 y = protect_from_queue (y, 0);
2543 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2544 abort ();
2546 /* Never force constant_p_rtx to memory. */
2547 if (GET_CODE (y) == CONSTANT_P_RTX)
2549 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2550 y = force_const_mem (mode, y);
2552 /* If X or Y are memory references, verify that their addresses are valid
2553 for the machine. */
2554 if (GET_CODE (x) == MEM
2555 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2556 && ! push_operand (x, GET_MODE (x)))
2557 || (flag_force_addr
2558 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2559 x = change_address (x, VOIDmode, XEXP (x, 0));
2561 if (GET_CODE (y) == MEM
2562 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2563 || (flag_force_addr
2564 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2565 y = change_address (y, VOIDmode, XEXP (y, 0));
2567 if (mode == BLKmode)
2568 abort ();
2570 return emit_move_insn_1 (x, y);
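/* A minimal usage sketch (register and mode are illustrative only):

       rtx reg = gen_reg_rtx (SImode);
       rtx last = emit_move_insn (reg, const0_rtx);

   A VOIDmode constant such as const0_rtx is acceptable as Y, per the
   comment above.  */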
2573 /* Low level part of emit_move_insn.
2574 Called just like emit_move_insn, but assumes X and Y
2575 are basically valid. */
2578 emit_move_insn_1 (x, y)
2579 rtx x, y;
2581 enum machine_mode mode = GET_MODE (x);
2582 enum machine_mode submode;
2583 enum mode_class class = GET_MODE_CLASS (mode);
2584 int i;
2586 if (mode >= MAX_MACHINE_MODE)
2587 abort ();
2589 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2590 return
2591 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2593 /* Expand complex moves by moving real part and imag part, if possible. */
2594 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2595 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2596 * BITS_PER_UNIT),
2597 (class == MODE_COMPLEX_INT
2598 ? MODE_INT : MODE_FLOAT),
2599 0))
2600 && (mov_optab->handlers[(int) submode].insn_code
2601 != CODE_FOR_nothing))
2603 /* Don't split destination if it is a stack push. */
2604 int stack = push_operand (x, GET_MODE (x));
2606 /* If this is a stack push, push the highpart first, so it
2607 will be in the argument order.
2609 In that case, change_address is used only to convert
2610 the mode, not to change the address. */
2611 if (stack)
2613 /* Note that the real part always precedes the imag part in memory
2614 regardless of the machine's endianness. */
2615 #ifdef STACK_GROWS_DOWNWARD
2616 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2617 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2618 gen_imagpart (submode, y)));
2619 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2620 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2621 gen_realpart (submode, y)));
2622 #else
2623 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2624 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2625 gen_realpart (submode, y)));
2626 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2627 (gen_rtx_MEM (submode, (XEXP (x, 0))),
2628 gen_imagpart (submode, y)));
2629 #endif
2631 else
2633 /* If this is a complex value with each part being smaller than a
2634 word, the usual calling sequence will likely pack the pieces into
2635 a single register. Unfortunately, SUBREG of hard registers only
2636 deals in terms of words, so we have a problem converting input
2637 arguments to the CONCAT of two registers that is used elsewhere
2638 for complex values. If this is before reload, we can copy it into
2639 memory and reload. FIXME, we should see about using extract and
2640 insert on integer registers, but complex short and complex char
2641 variables should be rarely used. */
2642 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2643 && (reload_in_progress | reload_completed) == 0)
2645 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2646 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2648 if (packed_dest_p || packed_src_p)
2650 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2651 ? MODE_FLOAT : MODE_INT);
2653 enum machine_mode reg_mode =
2654 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2656 if (reg_mode != BLKmode)
2658 rtx mem = assign_stack_temp (reg_mode,
2659 GET_MODE_SIZE (mode), 0);
2661 rtx cmem = change_address (mem, mode, NULL_RTX);
2663 current_function->cannot_inline
2664 = "function uses short complex types";
2666 if (packed_dest_p)
2668 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2669 emit_move_insn_1 (cmem, y);
2670 return emit_move_insn_1 (sreg, mem);
2672 else
2674 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2675 emit_move_insn_1 (mem, sreg);
2676 return emit_move_insn_1 (x, cmem);
2682 /* Show the output dies here. This is necessary for pseudos;
2683 hard regs shouldn't appear here except as return values.
2684 We never want to emit such a clobber after reload. */
2685 if (x != y
2686 && ! (reload_in_progress || reload_completed))
2688 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2691 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2692 (gen_realpart (submode, x), gen_realpart (submode, y)));
2693 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2694 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2697 return get_last_insn ();
2700 /* This will handle any multi-word mode that lacks a move_insn pattern.
2701 However, you will get better code if you define such patterns,
2702 even if they must turn into multiple assembler instructions. */
2703 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2705 rtx last_insn = 0;
2707 #ifdef PUSH_ROUNDING
2709 /* If X is a push on the stack, do the push now and replace
2710 X with a reference to the stack pointer. */
2711 if (push_operand (x, GET_MODE (x)))
2713 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2714 x = change_address (x, VOIDmode, stack_pointer_rtx);
2716 #endif
2718 /* Show the output dies here. This is necessary for pseudos;
2719 hard regs shouldn't appear here except as return values.
2720 We never want to emit such a clobber after reload. */
2721 if (x != y
2722 && ! (reload_in_progress || reload_completed))
2724 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2727 for (i = 0;
2728 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2729 i++)
2731 rtx xpart = operand_subword (x, i, 1, mode);
2732 rtx ypart = operand_subword (y, i, 1, mode);
2734 /* If we can't get a part of Y, put Y into memory if it is a
2735 constant. Otherwise, force it into a register. If we still
2736 can't get a part of Y, abort. */
2737 if (ypart == 0 && CONSTANT_P (y))
2739 y = force_const_mem (mode, y);
2740 ypart = operand_subword (y, i, 1, mode);
2742 else if (ypart == 0)
2743 ypart = operand_subword_force (y, i, mode);
2745 if (xpart == 0 || ypart == 0)
2746 abort ();
2748 last_insn = emit_move_insn (xpart, ypart);
2751 return last_insn;
2753 else
2754 abort ();
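/* A hedged example of the word-by-word fallback above: moving a DImode
   value on a 32-bit target that provides no DImode move pattern emits
   two SImode moves, one per word, via operand_subword.  */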
2757 /* Pushing data onto the stack. */
2759 /* Push a block of length SIZE (perhaps variable)
2760 and return an rtx to address the beginning of the block.
2761 Note that it is not possible for the value returned to be a QUEUED.
2762 The value may be virtual_outgoing_args_rtx.
2764 EXTRA is the number of bytes of padding to push in addition to SIZE.
2765 BELOW nonzero means this padding comes at low addresses;
2766 otherwise, the padding comes at high addresses. */
2769 push_block (size, extra, below)
2770 rtx size;
2771 int extra, below;
2773 register rtx temp;
2775 size = convert_modes (Pmode, ptr_mode, size, 1);
2776 if (CONSTANT_P (size))
2777 anti_adjust_stack (plus_constant (size, extra));
2778 else if (GET_CODE (size) == REG && extra == 0)
2779 anti_adjust_stack (size);
2780 else
2782 rtx temp = copy_to_mode_reg (Pmode, size);
2783 if (extra != 0)
2784 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2785 temp, 0, OPTAB_LIB_WIDEN);
2786 anti_adjust_stack (temp);
2789 #if defined (STACK_GROWS_DOWNWARD) \
2790 || (defined (ARGS_GROW_DOWNWARD) \
2791 && !defined (ACCUMULATE_OUTGOING_ARGS))
2793 /* Return the lowest stack address when STACK or ARGS grow downward and
2794 we are not accumulating outgoing arguments (the c4x port uses such
2795 conventions). */
2796 temp = virtual_outgoing_args_rtx;
2797 if (extra != 0 && below)
2798 temp = plus_constant (temp, extra);
2799 #else
2800 if (GET_CODE (size) == CONST_INT)
2801 temp = plus_constant (virtual_outgoing_args_rtx,
2802 - INTVAL (size) - (below ? 0 : extra));
2803 else if (extra != 0 && !below)
2804 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2805 negate_rtx (Pmode, plus_constant (size, extra)));
2806 else
2807 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2808 negate_rtx (Pmode, size));
2809 #endif
2811 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
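/* A worked illustration (target specifics are hypothetical): with a
   constant size, push_block (GEN_INT (16), 0, 0) reduces to
   anti_adjust_stack (GEN_INT (16)) followed by computing an address for
   the beginning of the new 16-byte block.  */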
2815 gen_push_operand ()
2817 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
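/* For illustration: with STACK_PUSH_CODE == PRE_DEC this yields a
   (pre_dec ...) of the stack pointer in Pmode, so
   gen_rtx_MEM (BLKmode, gen_push_operand ()) as used in emit_push_insn
   below is a memory reference that pushes as it is stored into.  */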
2820 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2821 block of SIZE bytes. */
2823 static rtx
2824 get_push_address (size)
2825 int size;
2827 register rtx temp;
2829 if (STACK_PUSH_CODE == POST_DEC)
2830 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2831 else if (STACK_PUSH_CODE == POST_INC)
2832 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2833 else
2834 temp = stack_pointer_rtx;
2836 return copy_to_reg (temp);
2839 /* Generate code to push X onto the stack, assuming it has mode MODE and
2840 type TYPE.
2841 MODE is redundant except when X is a CONST_INT (since they don't
2842 carry mode info).
2843 SIZE is an rtx for the size of data to be copied (in bytes),
2844 needed only if X is BLKmode.
2846 ALIGN (in bytes) is the maximum alignment we can assume.
2848 If PARTIAL and REG are both nonzero, then copy that many of the first
2849 words of X into registers starting with REG, and push the rest of X.
2850 The amount of space pushed is decreased by PARTIAL words,
2851 rounded *down* to a multiple of PARM_BOUNDARY.
2852 REG must be a hard register in this case.
2853 If REG is zero but PARTIAL is not, take all other actions for an
2854 argument partially in registers, but do not actually load any
2855 registers.
2857 EXTRA is the amount in bytes of extra space to leave next to this arg.
2858 This is ignored if an argument block has already been allocated.
2860 On a machine that lacks real push insns, ARGS_ADDR is the address of
2861 the bottom of the argument block for this call. We use indexing off there
2862 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2863 argument block has not been preallocated.
2865 ARGS_SO_FAR is the size of args previously pushed for this call.
2867 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2868 for arguments passed in registers. If nonzero, it will be the number
2869 of bytes required. */
2871 void
2872 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2873 args_addr, args_so_far, reg_parm_stack_space)
2874 register rtx x;
2875 enum machine_mode mode;
2876 tree type;
2877 rtx size;
2878 int align;
2879 int partial;
2880 rtx reg;
2881 int extra;
2882 rtx args_addr;
2883 rtx args_so_far;
2884 int reg_parm_stack_space;
2886 rtx xinner;
2887 enum direction stack_direction
2888 #ifdef STACK_GROWS_DOWNWARD
2889 = downward;
2890 #else
2891 = upward;
2892 #endif
2894 /* Decide where to pad the argument: `downward' for below,
2895 `upward' for above, or `none' for don't pad it.
2896 Default is below for small data on big-endian machines; else above. */
2897 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2899 /* Invert direction if stack is post-update. */
2900 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2901 if (where_pad != none)
2902 where_pad = (where_pad == downward ? upward : downward);
2904 xinner = x = protect_from_queue (x, 0);
2906 if (mode == BLKmode)
2908 /* Copy a block into the stack, entirely or partially. */
2910 register rtx temp;
2911 int used = partial * UNITS_PER_WORD;
2912 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2913 int skip;
2915 if (size == 0)
2916 abort ();
2918 used -= offset;
2920 /* USED is now the # of bytes we need not copy to the stack
2921 because registers will take care of them. */
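/* A hedged numeric example: with UNITS_PER_WORD == 4, PARM_BOUNDARY == 64
   and PARTIAL == 3, USED starts at 12, OFFSET is 12 % 8 == 4, and after
   the subtraction above USED is 8, i.e. eight bytes that the registers
   cover and that need not be copied to the stack.  */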
2923 if (partial != 0)
2924 xinner = change_address (xinner, BLKmode,
2925 plus_constant (XEXP (xinner, 0), used));
2927 /* If the partial register-part of the arg counts in its stack size,
2928 skip the part of stack space corresponding to the registers.
2929 Otherwise, start copying to the beginning of the stack space,
2930 by setting SKIP to 0. */
2931 skip = (reg_parm_stack_space == 0) ? 0 : used;
2933 #ifdef PUSH_ROUNDING
2934 /* Do it with several push insns if that doesn't take lots of insns
2935 and if there is no difficulty with push insns that skip bytes
2936 on the stack for alignment purposes. */
2937 if (args_addr == 0
2938 && GET_CODE (size) == CONST_INT
2939 && skip == 0
2940 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2941 /* Here we avoid the case of a structure whose weak alignment
2942 forces many pushes of a small amount of data,
2943 and such small pushes do rounding that causes trouble. */
2944 && ((! SLOW_UNALIGNED_ACCESS)
2945 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2946 || PUSH_ROUNDING (align) == align)
2947 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2949 /* Push padding now if padding above and stack grows down,
2950 or if padding below and stack grows up.
2951 But if space already allocated, this has already been done. */
2952 if (extra && args_addr == 0
2953 && where_pad != none && where_pad != stack_direction)
2954 anti_adjust_stack (GEN_INT (extra));
2956 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2957 INTVAL (size) - used, align);
2959 if (current_function_check_memory_usage && ! in_check_memory_usage)
2961 rtx temp;
2963 in_check_memory_usage = 1;
2964 temp = get_push_address (INTVAL(size) - used);
2965 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2966 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2967 temp, Pmode,
2968 XEXP (xinner, 0), Pmode,
2969 GEN_INT (INTVAL(size) - used),
2970 TYPE_MODE (sizetype));
2971 else
2972 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2973 temp, Pmode,
2974 GEN_INT (INTVAL(size) - used),
2975 TYPE_MODE (sizetype),
2976 GEN_INT (MEMORY_USE_RW),
2977 TYPE_MODE (integer_type_node));
2978 in_check_memory_usage = 0;
2981 else
2982 #endif /* PUSH_ROUNDING */
2984 /* Otherwise make space on the stack and copy the data
2985 to the address of that space. */
2987 /* Deduct words put into registers from the size we must copy. */
2988 if (partial != 0)
2990 if (GET_CODE (size) == CONST_INT)
2991 size = GEN_INT (INTVAL (size) - used);
2992 else
2993 size = expand_binop (GET_MODE (size), sub_optab, size,
2994 GEN_INT (used), NULL_RTX, 0,
2995 OPTAB_LIB_WIDEN);
2998 /* Get the address of the stack space.
2999 In this case, we do not deal with EXTRA separately.
3000 A single stack adjust will do. */
3001 if (! args_addr)
3003 temp = push_block (size, extra, where_pad == downward);
3004 extra = 0;
3006 else if (GET_CODE (args_so_far) == CONST_INT)
3007 temp = memory_address (BLKmode,
3008 plus_constant (args_addr,
3009 skip + INTVAL (args_so_far)));
3010 else
3011 temp = memory_address (BLKmode,
3012 plus_constant (gen_rtx_PLUS (Pmode,
3013 args_addr,
3014 args_so_far),
3015 skip));
3016 if (current_function_check_memory_usage && ! in_check_memory_usage)
3018 rtx target;
3020 in_check_memory_usage = 1;
3021 target = copy_to_reg (temp);
3022 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3023 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3024 target, Pmode,
3025 XEXP (xinner, 0), Pmode,
3026 size, TYPE_MODE (sizetype));
3027 else
3028 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3029 target, Pmode,
3030 size, TYPE_MODE (sizetype),
3031 GEN_INT (MEMORY_USE_RW),
3032 TYPE_MODE (integer_type_node));
3033 in_check_memory_usage = 0;
3036 /* TEMP is the address of the block. Copy the data there. */
3037 if (GET_CODE (size) == CONST_INT
3038 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
3040 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3041 INTVAL (size), align);
3042 goto ret;
3044 else
3046 rtx opalign = GEN_INT (align);
3047 enum machine_mode mode;
3048 rtx target = gen_rtx_MEM (BLKmode, temp);
3050 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3051 mode != VOIDmode;
3052 mode = GET_MODE_WIDER_MODE (mode))
3054 enum insn_code code = movstr_optab[(int) mode];
3055 insn_operand_predicate_fn pred;
3057 if (code != CODE_FOR_nothing
3058 && ((GET_CODE (size) == CONST_INT
3059 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3060 <= (GET_MODE_MASK (mode) >> 1)))
3061 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3062 && (!(pred = insn_data[(int) code].operand[0].predicate)
3063 || ((*pred) (target, BLKmode)))
3064 && (!(pred = insn_data[(int) code].operand[1].predicate)
3065 || ((*pred) (xinner, BLKmode)))
3066 && (!(pred = insn_data[(int) code].operand[3].predicate)
3067 || ((*pred) (opalign, VOIDmode))))
3069 rtx op2 = convert_to_mode (mode, size, 1);
3070 rtx last = get_last_insn ();
3071 rtx pat;
3073 pred = insn_data[(int) code].operand[2].predicate;
3074 if (pred != 0 && ! (*pred) (op2, mode))
3075 op2 = copy_to_mode_reg (mode, op2);
3077 pat = GEN_FCN ((int) code) (target, xinner,
3078 op2, opalign);
3079 if (pat)
3081 emit_insn (pat);
3082 goto ret;
3084 else
3085 delete_insns_since (last);
3090 #ifndef ACCUMULATE_OUTGOING_ARGS
3091 /* If the source is referenced relative to the stack pointer,
3092 copy it to another register to stabilize it. We do not need
3093 to do this if we know that we won't be changing sp. */
3095 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3096 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3097 temp = copy_to_reg (temp);
3098 #endif
3100 /* Make inhibit_defer_pop nonzero around the library call
3101 to force it to pop the bcopy-arguments right away. */
3102 NO_DEFER_POP;
3103 #ifdef TARGET_MEM_FUNCTIONS
3104 emit_library_call (memcpy_libfunc, 0,
3105 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3106 convert_to_mode (TYPE_MODE (sizetype),
3107 size, TREE_UNSIGNED (sizetype)),
3108 TYPE_MODE (sizetype));
3109 #else
3110 emit_library_call (bcopy_libfunc, 0,
3111 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3112 convert_to_mode (TYPE_MODE (integer_type_node),
3113 size,
3114 TREE_UNSIGNED (integer_type_node)),
3115 TYPE_MODE (integer_type_node));
3116 #endif
3117 OK_DEFER_POP;
3120 else if (partial > 0)
3122 /* Scalar partly in registers. */
3124 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3125 int i;
3126 int not_stack;
3127 /* # words of start of argument
3128 that we must make space for but need not store. */
3129 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3130 int args_offset = INTVAL (args_so_far);
3131 int skip;
3133 /* Push padding now if padding above and stack grows down,
3134 or if padding below and stack grows up.
3135 But if space already allocated, this has already been done. */
3136 if (extra && args_addr == 0
3137 && where_pad != none && where_pad != stack_direction)
3138 anti_adjust_stack (GEN_INT (extra));
3140 /* If we make space by pushing it, we might as well push
3141 the real data. Otherwise, we can leave OFFSET nonzero
3142 and leave the space uninitialized. */
3143 if (args_addr == 0)
3144 offset = 0;
3146 /* Now NOT_STACK gets the number of words that we don't need to
3147 allocate on the stack. */
3148 not_stack = partial - offset;
3150 /* If the partial register-part of the arg counts in its stack size,
3151 skip the part of stack space corresponding to the registers.
3152 Otherwise, start copying to the beginning of the stack space,
3153 by setting SKIP to 0. */
3154 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3156 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3157 x = validize_mem (force_const_mem (mode, x));
3159 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3160 SUBREGs of such registers are not allowed. */
3161 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3162 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3163 x = copy_to_reg (x);
3165 /* Loop over all the words allocated on the stack for this arg. */
3166 /* We can do it by words, because any scalar bigger than a word
3167 has a size that is a multiple of a word. */
3168 #ifndef PUSH_ARGS_REVERSED
3169 for (i = not_stack; i < size; i++)
3170 #else
3171 for (i = size - 1; i >= not_stack; i--)
3172 #endif
3173 if (i >= not_stack + offset)
3174 emit_push_insn (operand_subword_force (x, i, mode),
3175 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3176 0, args_addr,
3177 GEN_INT (args_offset + ((i - not_stack + skip)
3178 * UNITS_PER_WORD)),
3179 reg_parm_stack_space);
3181 else
3183 rtx addr;
3184 rtx target = NULL_RTX;
3186 /* Push padding now if padding above and stack grows down,
3187 or if padding below and stack grows up.
3188 But if space already allocated, this has already been done. */
3189 if (extra && args_addr == 0
3190 && where_pad != none && where_pad != stack_direction)
3191 anti_adjust_stack (GEN_INT (extra));
3193 #ifdef PUSH_ROUNDING
3194 if (args_addr == 0)
3195 addr = gen_push_operand ();
3196 else
3197 #endif
3199 if (GET_CODE (args_so_far) == CONST_INT)
3200 addr
3201 = memory_address (mode,
3202 plus_constant (args_addr,
3203 INTVAL (args_so_far)));
3204 else
3205 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3206 args_so_far));
3207 target = addr;
3210 emit_move_insn (gen_rtx_MEM (mode, addr), x);
3212 if (current_function_check_memory_usage && ! in_check_memory_usage)
3214 in_check_memory_usage = 1;
3215 if (target == 0)
3216 target = get_push_address (GET_MODE_SIZE (mode));
3218 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3219 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3220 target, Pmode,
3221 XEXP (x, 0), Pmode,
3222 GEN_INT (GET_MODE_SIZE (mode)),
3223 TYPE_MODE (sizetype));
3224 else
3225 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3226 target, Pmode,
3227 GEN_INT (GET_MODE_SIZE (mode)),
3228 TYPE_MODE (sizetype),
3229 GEN_INT (MEMORY_USE_RW),
3230 TYPE_MODE (integer_type_node));
3231 in_check_memory_usage = 0;
3235 ret:
3236 /* If part should go in registers, copy that part
3237 into the appropriate registers. Do this now, at the end,
3238 since mem-to-mem copies above may do function calls. */
3239 if (partial > 0 && reg != 0)
3241 /* Handle calls that pass values in multiple non-contiguous locations.
3242 The Irix 6 ABI has examples of this. */
3243 if (GET_CODE (reg) == PARALLEL)
3244 emit_group_load (reg, x, -1, align); /* ??? size? */
3245 else
3246 move_block_to_reg (REGNO (reg), x, partial, mode);
3249 if (extra && args_addr == 0 && where_pad == stack_direction)
3250 anti_adjust_stack (GEN_INT (extra));
3253 /* Expand an assignment that stores the value of FROM into TO.
3254 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3255 (This may contain a QUEUED rtx;
3256 if the value is constant, this rtx is a constant.)
3257 Otherwise, the returned value is NULL_RTX.
3259 SUGGEST_REG is no longer actually used.
3260 It used to mean, copy the value through a register
3261 and return that register, if that is possible.
3262 We now use WANT_VALUE to decide whether to do this. */
3265 expand_assignment (to, from, want_value, suggest_reg)
3266 tree to, from;
3267 int want_value;
3268 int suggest_reg ATTRIBUTE_UNUSED;
3270 register rtx to_rtx = 0;
3271 rtx result;
3273 /* Don't crash if the lhs of the assignment was erroneous. */
3275 if (TREE_CODE (to) == ERROR_MARK)
3277 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3278 return want_value ? result : NULL_RTX;
3281 /* Assignment of a structure component needs special treatment
3282 if the structure component's rtx is not simply a MEM.
3283 Assignment of an array element at a constant index, and assignment of
3284 an array element in an unaligned packed structure field, has the same
3285 problem. */
3287 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3288 || TREE_CODE (to) == ARRAY_REF)
3290 enum machine_mode mode1;
3291 int bitsize;
3292 int bitpos;
3293 tree offset;
3294 int unsignedp;
3295 int volatilep = 0;
3296 tree tem;
3297 int alignment;
3299 push_temp_slots ();
3300 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3301 &unsignedp, &volatilep, &alignment);
3303 /* If we are going to use store_bit_field and extract_bit_field,
3304 make sure to_rtx will be safe for multiple use. */
3306 if (mode1 == VOIDmode && want_value)
3307 tem = stabilize_reference (tem);
3309 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3310 if (offset != 0)
3312 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3314 if (GET_CODE (to_rtx) != MEM)
3315 abort ();
3317 if (GET_MODE (offset_rtx) != ptr_mode)
3319 #ifdef POINTERS_EXTEND_UNSIGNED
3320 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3321 #else
3322 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3323 #endif
3326 /* A constant address in TO_RTX can have VOIDmode; we must not try
3327 to call force_reg for that case. Avoid that case. */
3328 if (GET_CODE (to_rtx) == MEM
3329 && GET_MODE (to_rtx) == BLKmode
3330 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3331 && bitsize
3332 && (bitpos % bitsize) == 0
3333 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3334 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3336 rtx temp = change_address (to_rtx, mode1,
3337 plus_constant (XEXP (to_rtx, 0),
3338 (bitpos /
3339 BITS_PER_UNIT)));
3340 if (GET_CODE (XEXP (temp, 0)) == REG)
3341 to_rtx = temp;
3342 else
3343 to_rtx = change_address (to_rtx, mode1,
3344 force_reg (GET_MODE (XEXP (temp, 0)),
3345 XEXP (temp, 0)));
3346 bitpos = 0;
3349 to_rtx = change_address (to_rtx, VOIDmode,
3350 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3351 force_reg (ptr_mode,
3352 offset_rtx)));
3355 if (volatilep)
3357 if (GET_CODE (to_rtx) == MEM)
3359 /* When the offset is zero, to_rtx is the address of the
3360 structure we are storing into, and hence may be shared.
3361 We must make a new MEM before setting the volatile bit. */
3362 if (offset == 0)
3363 to_rtx = copy_rtx (to_rtx);
3365 MEM_VOLATILE_P (to_rtx) = 1;
3367 #if 0 /* This was turned off because, when a field is volatile
3368 in an object which is not volatile, the object may be in a register,
3369 and then we would abort over here. */
3370 else
3371 abort ();
3372 #endif
3375 if (TREE_CODE (to) == COMPONENT_REF
3376 && TREE_READONLY (TREE_OPERAND (to, 1)))
3378 if (offset == 0)
3379 to_rtx = copy_rtx (to_rtx);
3381 RTX_UNCHANGING_P (to_rtx) = 1;
3384 /* Check the access. */
3385 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3387 rtx to_addr;
3388 int size;
3389 int best_mode_size;
3390 enum machine_mode best_mode;
3392 best_mode = get_best_mode (bitsize, bitpos,
3393 TYPE_ALIGN (TREE_TYPE (tem)),
3394 mode1, volatilep);
3395 if (best_mode == VOIDmode)
3396 best_mode = QImode;
3398 best_mode_size = GET_MODE_BITSIZE (best_mode);
3399 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3400 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3401 size *= GET_MODE_SIZE (best_mode);
3403 /* Check the access right of the pointer. */
3404 if (size)
3405 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3406 to_addr, Pmode,
3407 GEN_INT (size), TYPE_MODE (sizetype),
3408 GEN_INT (MEMORY_USE_WO),
3409 TYPE_MODE (integer_type_node));
3412 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3413 (want_value
3414 /* Spurious cast makes HPUX compiler happy. */
3415 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3416 : VOIDmode),
3417 unsignedp,
3418 /* Required alignment of containing datum. */
3419 alignment,
3420 int_size_in_bytes (TREE_TYPE (tem)),
3421 get_alias_set (to));
3422 preserve_temp_slots (result);
3423 free_temp_slots ();
3424 pop_temp_slots ();
3426 /* If the value is meaningful, convert RESULT to the proper mode.
3427 Otherwise, return nothing. */
3428 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3429 TYPE_MODE (TREE_TYPE (from)),
3430 result,
3431 TREE_UNSIGNED (TREE_TYPE (to)))
3432 : NULL_RTX);
3435 /* If the rhs is a function call and its value is not an aggregate,
3436 call the function before we start to compute the lhs.
3437 This is needed for correct code for cases such as
3438 val = setjmp (buf) on machines where reference to val
3439 requires loading up part of an address in a separate insn.
3441 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3442 a promoted variable where the zero- or sign- extension needs to be done.
3443 Handling this in the normal way is safe because no computation is done
3444 before the call. */
3445 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3446 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3447 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3449 rtx value;
3451 push_temp_slots ();
3452 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3453 if (to_rtx == 0)
3454 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3456 /* Handle calls that return values in multiple non-contiguous locations.
3457 The Irix 6 ABI has examples of this. */
3458 if (GET_CODE (to_rtx) == PARALLEL)
3459 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3460 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3461 else if (GET_MODE (to_rtx) == BLKmode)
3462 emit_block_move (to_rtx, value, expr_size (from),
3463 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3464 else
3466 #ifdef POINTERS_EXTEND_UNSIGNED
3467 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3468 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3469 value = convert_memory_address (GET_MODE (to_rtx), value);
3470 #endif
3471 emit_move_insn (to_rtx, value);
3473 preserve_temp_slots (to_rtx);
3474 free_temp_slots ();
3475 pop_temp_slots ();
3476 return want_value ? to_rtx : NULL_RTX;
3479 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3480 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3482 if (to_rtx == 0)
3484 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3485 if (GET_CODE (to_rtx) == MEM)
3486 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3489 /* Don't move directly into a return register. */
3490 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3492 rtx temp;
3494 push_temp_slots ();
3495 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3496 emit_move_insn (to_rtx, temp);
3497 preserve_temp_slots (to_rtx);
3498 free_temp_slots ();
3499 pop_temp_slots ();
3500 return want_value ? to_rtx : NULL_RTX;
3503 /* In case we are returning the contents of an object which overlaps
3504 the place the value is being stored, use a safe function when copying
3505 a value through a pointer into a structure value return block. */
3506 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3507 && current_function_returns_struct
3508 && !current_function_returns_pcc_struct)
3510 rtx from_rtx, size;
3512 push_temp_slots ();
3513 size = expr_size (from);
3514 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3515 EXPAND_MEMORY_USE_DONT);
3517 /* Copy the rights of the bitmap. */
3518 if (current_function_check_memory_usage)
3519 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3520 XEXP (to_rtx, 0), Pmode,
3521 XEXP (from_rtx, 0), Pmode,
3522 convert_to_mode (TYPE_MODE (sizetype),
3523 size, TREE_UNSIGNED (sizetype)),
3524 TYPE_MODE (sizetype));
3526 #ifdef TARGET_MEM_FUNCTIONS
3527 emit_library_call (memcpy_libfunc, 0,
3528 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3529 XEXP (from_rtx, 0), Pmode,
3530 convert_to_mode (TYPE_MODE (sizetype),
3531 size, TREE_UNSIGNED (sizetype)),
3532 TYPE_MODE (sizetype));
3533 #else
3534 emit_library_call (bcopy_libfunc, 0,
3535 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3536 XEXP (to_rtx, 0), Pmode,
3537 convert_to_mode (TYPE_MODE (integer_type_node),
3538 size, TREE_UNSIGNED (integer_type_node)),
3539 TYPE_MODE (integer_type_node));
3540 #endif
3542 preserve_temp_slots (to_rtx);
3543 free_temp_slots ();
3544 pop_temp_slots ();
3545 return want_value ? to_rtx : NULL_RTX;
3548 /* Compute FROM and store the value in the rtx we got. */
3550 push_temp_slots ();
3551 result = store_expr (from, to_rtx, want_value);
3552 preserve_temp_slots (result);
3553 free_temp_slots ();
3554 pop_temp_slots ();
3555 return want_value ? result : NULL_RTX;
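/* A minimal usage sketch (the tree operands are hypothetical): a plain
   statement-level assignment is typically expanded as

       expand_assignment (lhs, rhs, 0, 0);

   with WANT_VALUE == 0 when the value of the assignment itself is unused,
   so NULL_RTX is returned as described above.  */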
3558 /* Generate code for computing expression EXP,
3559 and storing the value into TARGET.
3560 TARGET may contain a QUEUED rtx.
3562 If WANT_VALUE is nonzero, return a copy of the value
3563 not in TARGET, so that we can be sure to use the proper
3564 value in a containing expression even if TARGET has something
3565 else stored in it. If possible, we copy the value through a pseudo
3566 and return that pseudo. Or, if the value is constant, we try to
3567 return the constant. In some cases, we return a pseudo
3568 copied *from* TARGET.
3570 If the mode is BLKmode then we may return TARGET itself.
3571 It turns out that in BLKmode it doesn't cause a problem,
3572 because C has no operators that could combine two different
3573 assignments into the same BLKmode object with different values
3574 with no sequence point. Will other languages need this to
3575 be more thorough?
3577 If WANT_VALUE is 0, we return NULL, to make sure
3578 to catch quickly any cases where the caller uses the value
3579 and fails to set WANT_VALUE. */
3582 store_expr (exp, target, want_value)
3583 register tree exp;
3584 register rtx target;
3585 int want_value;
3587 register rtx temp;
3588 int dont_return_target = 0;
3590 if (TREE_CODE (exp) == COMPOUND_EXPR)
3592 /* Perform first part of compound expression, then assign from second
3593 part. */
3594 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3595 emit_queue ();
3596 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3598 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3600 /* For conditional expression, get safe form of the target. Then
3601 test the condition, doing the appropriate assignment on either
3602 side. This avoids the creation of unnecessary temporaries.
3603 For non-BLKmode, it is more efficient not to do this. */
3605 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3607 emit_queue ();
3608 target = protect_from_queue (target, 1);
3610 do_pending_stack_adjust ();
3611 NO_DEFER_POP;
3612 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3613 start_cleanup_deferral ();
3614 store_expr (TREE_OPERAND (exp, 1), target, 0);
3615 end_cleanup_deferral ();
3616 emit_queue ();
3617 emit_jump_insn (gen_jump (lab2));
3618 emit_barrier ();
3619 emit_label (lab1);
3620 start_cleanup_deferral ();
3621 store_expr (TREE_OPERAND (exp, 2), target, 0);
3622 end_cleanup_deferral ();
3623 emit_queue ();
3624 emit_label (lab2);
3625 OK_DEFER_POP;
3627 return want_value ? target : NULL_RTX;
3629 else if (queued_subexp_p (target))
3630 /* If target contains a postincrement, let's not risk
3631 using it as the place to generate the rhs. */
3633 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3635 /* Expand EXP into a new pseudo. */
3636 temp = gen_reg_rtx (GET_MODE (target));
3637 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3639 else
3640 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3642 /* If target is volatile, ANSI requires accessing the value
3643 *from* the target, if it is accessed. So make that happen.
3644 In no case return the target itself. */
3645 if (! MEM_VOLATILE_P (target) && want_value)
3646 dont_return_target = 1;
3648 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3649 && GET_MODE (target) != BLKmode)
3650 /* If target is in memory and caller wants value in a register instead,
3651 arrange that. Pass TARGET as target for expand_expr so that,
3652 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3653 We know expand_expr will not use the target in that case.
3654 Don't do this if TARGET is volatile because we are supposed
3655 to write it and then read it. */
3657 temp = expand_expr (exp, target, GET_MODE (target), 0);
3658 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3659 temp = copy_to_reg (temp);
3660 dont_return_target = 1;
3662 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3663 /* If this is a scalar in a register that is stored in a wider mode
3664 than the declared mode, compute the result into its declared mode
3665 and then convert to the wider mode. Our value is the computed
3666 expression. */
3668 /* If we don't want a value, we can do the conversion inside EXP,
3669 which will often result in some optimizations. Do the conversion
3670 in two steps: first change the signedness, if needed, then
3671 the extend. But don't do this if the type of EXP is a subtype
3672 of something else since then the conversion might involve
3673 more than just converting modes. */
3674 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3675 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3677 if (TREE_UNSIGNED (TREE_TYPE (exp))
3678 != SUBREG_PROMOTED_UNSIGNED_P (target))
3679 exp
3680 = convert
3681 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3682 TREE_TYPE (exp)),
3683 exp);
3685 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3686 SUBREG_PROMOTED_UNSIGNED_P (target)),
3687 exp);
3690 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3692 /* If TEMP is a volatile MEM and we want a result value, make
3693 the access now so it gets done only once. Likewise if
3694 it contains TARGET. */
3695 if (GET_CODE (temp) == MEM && want_value
3696 && (MEM_VOLATILE_P (temp)
3697 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3698 temp = copy_to_reg (temp);
3700 /* If TEMP is a VOIDmode constant, use convert_modes to make
3701 sure that we properly convert it. */
3702 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3703 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3704 TYPE_MODE (TREE_TYPE (exp)), temp,
3705 SUBREG_PROMOTED_UNSIGNED_P (target));
3707 convert_move (SUBREG_REG (target), temp,
3708 SUBREG_PROMOTED_UNSIGNED_P (target));
3710 /* If we promoted a constant, change the mode back down to match
3711 target. Otherwise, the caller might get confused by a result whose
3712 mode is larger than expected. */
3714 if (want_value && GET_MODE (temp) != GET_MODE (target)
3715 && GET_MODE (temp) != VOIDmode)
3717 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3718 SUBREG_PROMOTED_VAR_P (temp) = 1;
3719 SUBREG_PROMOTED_UNSIGNED_P (temp)
3720 = SUBREG_PROMOTED_UNSIGNED_P (target);
3723 return want_value ? temp : NULL_RTX;
3725 else
3727 temp = expand_expr (exp, target, GET_MODE (target), 0);
3728 /* Return TARGET if it's a specified hardware register.
3729 If TARGET is a volatile mem ref, either return TARGET
3730 or return a reg copied *from* TARGET; ANSI requires this.
3732 Otherwise, if TEMP is not TARGET, return TEMP
3733 if it is constant (for efficiency),
3734 or if we really want the correct value. */
3735 if (!(target && GET_CODE (target) == REG
3736 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3737 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3738 && ! rtx_equal_p (temp, target)
3739 && (CONSTANT_P (temp) || want_value))
3740 dont_return_target = 1;
3743 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3744 the same as that of TARGET, adjust the constant. This is needed, for
3745 example, in case it is a CONST_DOUBLE and we want only a word-sized
3746 value. */
3747 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3748 && TREE_CODE (exp) != ERROR_MARK
3749 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3750 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3751 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3753 if (current_function_check_memory_usage
3754 && GET_CODE (target) == MEM
3755 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3757 if (GET_CODE (temp) == MEM)
3758 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3759 XEXP (target, 0), Pmode,
3760 XEXP (temp, 0), Pmode,
3761 expr_size (exp), TYPE_MODE (sizetype));
3762 else
3763 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3764 XEXP (target, 0), Pmode,
3765 expr_size (exp), TYPE_MODE (sizetype),
3766 GEN_INT (MEMORY_USE_WO),
3767 TYPE_MODE (integer_type_node));
3770 /* If value was not generated in the target, store it there.
3771 Convert the value to TARGET's type first if necessary. */
3772 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3773 one or both of them are volatile memory refs, we have to distinguish
3774 two cases:
3775 - expand_expr has used TARGET. In this case, we must not generate
3776 another copy. This can be detected by TEMP and TARGET being equal
3777 according to ==.
3778 - expand_expr has not used TARGET - that means that the source just
3779 happens to have the same RTX form. Since temp will have been created
3780 by expand_expr, it will compare unequal according to == .
3781 We must generate a copy in this case, to reach the correct number
3782 of volatile memory references. */
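/* An illustrative sketch of the two cases above (added as an assumption
   about typical input, not taken from the original text): for an
   assignment such as

	volatile int v;
	...
	v = v;

   expand_expr may hand back TARGET itself (the same rtx, equal under ==,
   so no copy is wanted), or it may build a second MEM for `v' that is
   rtx_equal_p to TARGET but a distinct object; only the second case
   reaches the copy below, so that both volatile accesses are emitted.  */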
3784 if ((! rtx_equal_p (temp, target)
3785 || (temp != target && (side_effects_p (temp)
3786 || side_effects_p (target))))
3787 && TREE_CODE (exp) != ERROR_MARK)
3789 target = protect_from_queue (target, 1);
3790 if (GET_MODE (temp) != GET_MODE (target)
3791 && GET_MODE (temp) != VOIDmode)
3793 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3794 if (dont_return_target)
3796 /* In this case, we will return TEMP,
3797 so make sure it has the proper mode.
3798 But don't forget to store the value into TARGET. */
3799 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3800 emit_move_insn (target, temp);
3802 else
3803 convert_move (target, temp, unsignedp);
3806 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3808 /* Handle copying a string constant into an array.
3809 The string constant may be shorter than the array.
3810 So copy just the string's actual length, and clear the rest. */
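/* A worked example of the intent (illustrative only, not from the
   original comment): for

	char buf[8] = "abc";

   the STRING_CST supplies TREE_STRING_LENGTH == 4 bytes ("abc" plus the
   terminating NUL) while expr_size gives the 8 bytes of the array, so
   the code below copies 4 bytes and then clears the remaining 4.  */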
3811 rtx size;
3812 rtx addr;
3814 /* Get the size of the data type of the string,
3815 which is actually the size of the target. */
3816 size = expr_size (exp);
3817 if (GET_CODE (size) == CONST_INT
3818 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3819 emit_block_move (target, temp, size,
3820 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3821 else
3823 /* Compute the size of the data to copy from the string. */
3824 tree copy_size
3825 = size_binop (MIN_EXPR,
3826 make_tree (sizetype, size),
3827 convert (sizetype,
3828 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3829 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3830 VOIDmode, 0);
3831 rtx label = 0;
3833 /* Copy that much. */
3834 emit_block_move (target, temp, copy_size_rtx,
3835 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3837 /* Figure out how much is left in TARGET that we have to clear.
3838 Do all calculations in ptr_mode. */
3840 addr = XEXP (target, 0);
3841 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3843 if (GET_CODE (copy_size_rtx) == CONST_INT)
3845 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3846 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3848 else
3850 addr = force_reg (ptr_mode, addr);
3851 addr = expand_binop (ptr_mode, add_optab, addr,
3852 copy_size_rtx, NULL_RTX, 0,
3853 OPTAB_LIB_WIDEN);
3855 size = expand_binop (ptr_mode, sub_optab, size,
3856 copy_size_rtx, NULL_RTX, 0,
3857 OPTAB_LIB_WIDEN);
3859 label = gen_label_rtx ();
3860 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3861 GET_MODE (size), 0, 0, label);
3864 if (size != const0_rtx)
3866 /* Be sure we can write on ADDR. */
3867 if (current_function_check_memory_usage)
3868 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3869 addr, Pmode,
3870 size, TYPE_MODE (sizetype),
3871 GEN_INT (MEMORY_USE_WO),
3872 TYPE_MODE (integer_type_node));
3873 #ifdef TARGET_MEM_FUNCTIONS
3874 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3875 addr, ptr_mode,
3876 const0_rtx, TYPE_MODE (integer_type_node),
3877 convert_to_mode (TYPE_MODE (sizetype),
3878 size,
3879 TREE_UNSIGNED (sizetype)),
3880 TYPE_MODE (sizetype));
3881 #else
3882 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3883 addr, ptr_mode,
3884 convert_to_mode (TYPE_MODE (integer_type_node),
3885 size,
3886 TREE_UNSIGNED (integer_type_node)),
3887 TYPE_MODE (integer_type_node));
3888 #endif
3891 if (label)
3892 emit_label (label);
3895 /* Handle calls that return values in multiple non-contiguous locations.
3896 The Irix 6 ABI has examples of this. */
3897 else if (GET_CODE (target) == PARALLEL)
3898 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3899 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3900 else if (GET_MODE (temp) == BLKmode)
3901 emit_block_move (target, temp, expr_size (exp),
3902 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3903 else
3904 emit_move_insn (target, temp);
3907 /* If we don't want a value, return NULL_RTX. */
3908 if (! want_value)
3909 return NULL_RTX;
3911 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3912 ??? The latter test doesn't seem to make sense. */
3913 else if (dont_return_target && GET_CODE (temp) != MEM)
3914 return temp;
3916 /* Return TARGET itself if it is a hard register. */
3917 else if (want_value && GET_MODE (target) != BLKmode
3918 && ! (GET_CODE (target) == REG
3919 && REGNO (target) < FIRST_PSEUDO_REGISTER))
3920 return copy_to_reg (target);
3922 else
3923 return target;
3926 /* Return 1 if EXP just contains zeros. */
3928 static int
3929 is_zeros_p (exp)
3930 tree exp;
3932 tree elt;
3934 switch (TREE_CODE (exp))
3936 case CONVERT_EXPR:
3937 case NOP_EXPR:
3938 case NON_LVALUE_EXPR:
3939 return is_zeros_p (TREE_OPERAND (exp, 0));
3941 case INTEGER_CST:
3942 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3944 case COMPLEX_CST:
3945 return
3946 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3948 case REAL_CST:
3949 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3951 case CONSTRUCTOR:
3952 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3953 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3954 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3955 if (! is_zeros_p (TREE_VALUE (elt)))
3956 return 0;
3958 return 1;
3960 default:
3961 return 0;
3965 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
3967 static int
3968 mostly_zeros_p (exp)
3969 tree exp;
3971 if (TREE_CODE (exp) == CONSTRUCTOR)
3973 int elts = 0, zeros = 0;
3974 tree elt = CONSTRUCTOR_ELTS (exp);
3975 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3977 /* If there are no ranges of true bits, it is all zero. */
3978 return elt == NULL_TREE;
3980 for (; elt; elt = TREE_CHAIN (elt))
3982 /* We do not handle the case where the index is a RANGE_EXPR,
3983 so the statistic will be somewhat inaccurate.
3984 We do make a more accurate count in store_constructor itself,
3985 and since this function is only used for nested array elements,
3986 this should be close enough. */
3987 if (mostly_zeros_p (TREE_VALUE (elt)))
3988 zeros++;
3989 elts++;
3992 return 4 * zeros >= 3 * elts;
3995 return is_zeros_p (exp);
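/* A numeric sketch of the 3/4 rule above (an illustration added here, not
   part of the original source): a constructor with 16 explicit elements of
   which 12 are zero satisfies 4 * zeros (48) >= 3 * elts (48) and so counts
   as "mostly zero", while one with only 11 zero elements (44 < 48) does
   not; the caller then decides whether to clear the whole object first.  */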
3998 /* Helper function for store_constructor.
3999 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4000 TYPE is the type of the CONSTRUCTOR, not the element type.
4001 ALIGN and CLEARED are as for store_constructor.
4003 This provides a recursive shortcut back to store_constructor when it isn't
4004 necessary to go through store_field. This is so that we can pass through
4005 the cleared field to let store_constructor know that we may not have to
4006 clear a substructure if the outer structure has already been cleared. */
4008 static void
4009 store_constructor_field (target, bitsize, bitpos,
4010 mode, exp, type, align, cleared)
4011 rtx target;
4012 int bitsize, bitpos;
4013 enum machine_mode mode;
4014 tree exp, type;
4015 int align;
4016 int cleared;
4018 if (TREE_CODE (exp) == CONSTRUCTOR
4019 && bitpos % BITS_PER_UNIT == 0
4020 /* If we have a non-zero bitpos for a register target, then we just
4021 let store_field do the bitfield handling. This is unlikely to
4022 generate unnecessary clear instructions anyway. */
4023 && (bitpos == 0 || GET_CODE (target) == MEM))
4025 if (bitpos != 0)
4026 target = change_address (target, VOIDmode,
4027 plus_constant (XEXP (target, 0),
4028 bitpos / BITS_PER_UNIT));
4029 store_constructor (exp, target, align, cleared);
4031 else
4032 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4033 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4034 int_size_in_bytes (type), cleared);
4037 /* Store the value of constructor EXP into the rtx TARGET.
4038 TARGET is either a REG or a MEM.
4039 ALIGN is the maximum known alignment for TARGET, in bits.
4040 CLEARED is true if TARGET is known to have been zeroed. */
4042 static void
4043 store_constructor (exp, target, align, cleared)
4044 tree exp;
4045 rtx target;
4046 int align;
4047 int cleared;
4049 tree type = TREE_TYPE (exp);
4050 #ifdef WORD_REGISTER_OPERATIONS
4051 rtx exp_size = expr_size (exp);
4052 #endif
4054 /* We know our target cannot conflict, since safe_from_p has been called. */
4055 #if 0
4056 /* Don't try copying piece by piece into a hard register
4057 since that is vulnerable to being clobbered by EXP.
4058 Instead, construct in a pseudo register and then copy it all. */
4059 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4061 rtx temp = gen_reg_rtx (GET_MODE (target));
4062 store_constructor (exp, temp, 0);
4063 emit_move_insn (target, temp);
4064 return;
4066 #endif
4068 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4069 || TREE_CODE (type) == QUAL_UNION_TYPE)
4071 register tree elt;
4073 /* Inform later passes that the whole union value is dead. */
4074 if (TREE_CODE (type) == UNION_TYPE
4075 || TREE_CODE (type) == QUAL_UNION_TYPE)
4076 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4078 /* If we are building a static constructor into a register,
4079 set the initial value as zero so we can fold the value into
4080 a constant. But if more than one register is involved,
4081 this probably loses. */
4082 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4083 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4085 if (! cleared)
4086 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4088 cleared = 1;
4091 /* If the constructor has fewer fields than the structure
4092 or if we are initializing the structure to mostly zeros,
4093 clear the whole structure first. */
4094 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4095 != list_length (TYPE_FIELDS (type)))
4096 || mostly_zeros_p (exp))
4098 if (! cleared)
4099 clear_storage (target, expr_size (exp),
4100 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4102 cleared = 1;
4104 else
4105 /* Inform later passes that the old value is dead. */
4106 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4108 /* Store each element of the constructor into
4109 the corresponding field of TARGET. */
4111 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4113 register tree field = TREE_PURPOSE (elt);
4114 #ifdef WORD_REGISTER_OPERATIONS
4115 tree value = TREE_VALUE (elt);
4116 #endif
4117 register enum machine_mode mode;
4118 int bitsize;
4119 int bitpos = 0;
4120 int unsignedp;
4121 tree pos, constant = 0, offset = 0;
4122 rtx to_rtx = target;
4124 /* Just ignore missing fields.
4125 We cleared the whole structure, above,
4126 if any fields are missing. */
4127 if (field == 0)
4128 continue;
4130 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4131 continue;
4133 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4134 unsignedp = TREE_UNSIGNED (field);
4135 mode = DECL_MODE (field);
4136 if (DECL_BIT_FIELD (field))
4137 mode = VOIDmode;
4139 pos = DECL_FIELD_BITPOS (field);
4140 if (TREE_CODE (pos) == INTEGER_CST)
4141 constant = pos;
4142 else if (TREE_CODE (pos) == PLUS_EXPR
4143 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4144 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4145 else
4146 offset = pos;
4148 if (constant)
4149 bitpos = TREE_INT_CST_LOW (constant);
4151 if (offset)
4153 rtx offset_rtx;
4155 if (contains_placeholder_p (offset))
4156 offset = build (WITH_RECORD_EXPR, sizetype,
4157 offset, make_tree (TREE_TYPE (exp), target));
4159 offset = size_binop (FLOOR_DIV_EXPR, offset,
4160 size_int (BITS_PER_UNIT));
4162 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4163 if (GET_CODE (to_rtx) != MEM)
4164 abort ();
4166 if (GET_MODE (offset_rtx) != ptr_mode)
4168 #ifdef POINTERS_EXTEND_UNSIGNED
4169 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4170 #else
4171 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4172 #endif
4175 to_rtx
4176 = change_address (to_rtx, VOIDmode,
4177 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4178 force_reg (ptr_mode,
4179 offset_rtx)));
4182 if (TREE_READONLY (field))
4184 if (GET_CODE (to_rtx) == MEM)
4185 to_rtx = copy_rtx (to_rtx);
4187 RTX_UNCHANGING_P (to_rtx) = 1;
4190 #ifdef WORD_REGISTER_OPERATIONS
4191 /* If this initializes a field that is smaller than a word, at the
4192 start of a word, try to widen it to a full word.
4193 This special case allows us to output C++ member function
4194 initializations in a form that the optimizers can understand. */
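/* A hypothetical example of the widening (added for illustration; the
   numbers assume a 32-bit word): initializing a 16-bit integer field that
   starts on a word boundary with the constant 5 is rewritten as a
   full-word store of 5 (or of 5 << 16 on a big-endian target), which the
   optimizers handle better than a 16-bit bit-field insertion into the
   register.  */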
4195 if (constant
4196 && GET_CODE (target) == REG
4197 && bitsize < BITS_PER_WORD
4198 && bitpos % BITS_PER_WORD == 0
4199 && GET_MODE_CLASS (mode) == MODE_INT
4200 && TREE_CODE (value) == INTEGER_CST
4201 && GET_CODE (exp_size) == CONST_INT
4202 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4204 tree type = TREE_TYPE (value);
4205 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4207 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4208 value = convert (type, value);
4210 if (BYTES_BIG_ENDIAN)
4211 value
4212 = fold (build (LSHIFT_EXPR, type, value,
4213 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4214 bitsize = BITS_PER_WORD;
4215 mode = word_mode;
4217 #endif
4218 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4219 TREE_VALUE (elt), type,
4220 MIN (align,
4221 DECL_ALIGN (TREE_PURPOSE (elt))),
4222 cleared);
4225 else if (TREE_CODE (type) == ARRAY_TYPE)
4227 register tree elt;
4228 register int i;
4229 int need_to_clear;
4230 tree domain = TYPE_DOMAIN (type);
4231 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4232 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4233 tree elttype = TREE_TYPE (type);
4235 /* If the constructor has fewer elements than the array,
4236 clear the whole array first. Similarly if this is a
4237 static constructor of a non-BLKmode object. */
4238 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4239 need_to_clear = 1;
4240 else
4242 HOST_WIDE_INT count = 0, zero_count = 0;
4243 need_to_clear = 0;
4244 /* This loop is a more accurate version of the loop in
4245 mostly_zeros_p (it handles RANGE_EXPR in an index).
4246 It is also needed to check for missing elements. */
4247 for (elt = CONSTRUCTOR_ELTS (exp);
4248 elt != NULL_TREE;
4249 elt = TREE_CHAIN (elt))
4251 tree index = TREE_PURPOSE (elt);
4252 HOST_WIDE_INT this_node_count;
4253 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4255 tree lo_index = TREE_OPERAND (index, 0);
4256 tree hi_index = TREE_OPERAND (index, 1);
4257 if (TREE_CODE (lo_index) != INTEGER_CST
4258 || TREE_CODE (hi_index) != INTEGER_CST)
4260 need_to_clear = 1;
4261 break;
4263 this_node_count = TREE_INT_CST_LOW (hi_index)
4264 - TREE_INT_CST_LOW (lo_index) + 1;
4266 else
4267 this_node_count = 1;
4268 count += this_node_count;
4269 if (mostly_zeros_p (TREE_VALUE (elt)))
4270 zero_count += this_node_count;
4272 /* Clear the entire array first if there are any missing elements,
4273 or if the incidence of zero elements is >= 75%. */
4274 if (count < maxelt - minelt + 1
4275 || 4 * zero_count >= 3 * count)
4276 need_to_clear = 1;
4278 if (need_to_clear)
4280 if (! cleared)
4281 clear_storage (target, expr_size (exp),
4282 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4283 cleared = 1;
4285 else
4286 /* Inform later passes that the old value is dead. */
4287 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4289 /* Store each element of the constructor into
4290 the corresponding element of TARGET, determined
4291 by counting the elements. */
4292 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4293 elt;
4294 elt = TREE_CHAIN (elt), i++)
4296 register enum machine_mode mode;
4297 int bitsize;
4298 int bitpos;
4299 int unsignedp;
4300 tree value = TREE_VALUE (elt);
4301 int align = TYPE_ALIGN (TREE_TYPE (value));
4302 tree index = TREE_PURPOSE (elt);
4303 rtx xtarget = target;
4305 if (cleared && is_zeros_p (value))
4306 continue;
4308 mode = TYPE_MODE (elttype);
4309 bitsize = GET_MODE_BITSIZE (mode);
4310 unsignedp = TREE_UNSIGNED (elttype);
4312 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4314 tree lo_index = TREE_OPERAND (index, 0);
4315 tree hi_index = TREE_OPERAND (index, 1);
4316 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4317 struct nesting *loop;
4318 HOST_WIDE_INT lo, hi, count;
4319 tree position;
4321 /* If the range is constant and "small", unroll the loop. */
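/* An illustrative case (this assumes the GNU C range-designator extension,
   which yields a RANGE_EXPR index):

	int a[8] = { [2 ... 4] = 7 };

   gives lo = 2, hi = 4, count = 3; with 4-byte elements, 3 * 32 bits is
   well under the 40 * 8 limit below, so the three stores are emitted
   directly rather than as a loop.  */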
4322 if (TREE_CODE (lo_index) == INTEGER_CST
4323 && TREE_CODE (hi_index) == INTEGER_CST
4324 && (lo = TREE_INT_CST_LOW (lo_index),
4325 hi = TREE_INT_CST_LOW (hi_index),
4326 count = hi - lo + 1,
4327 (GET_CODE (target) != MEM
4328 || count <= 2
4329 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4330 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4331 <= 40 * 8))))
4333 lo -= minelt; hi -= minelt;
4334 for (; lo <= hi; lo++)
4336 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4337 store_constructor_field (target, bitsize, bitpos, mode,
4338 value, type, align, cleared);
4341 else
4343 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4344 loop_top = gen_label_rtx ();
4345 loop_end = gen_label_rtx ();
4347 unsignedp = TREE_UNSIGNED (domain);
4349 index = build_decl (VAR_DECL, NULL_TREE, domain);
4351 DECL_RTL (index) = index_r
4352 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4353 &unsignedp, 0));
4355 if (TREE_CODE (value) == SAVE_EXPR
4356 && SAVE_EXPR_RTL (value) == 0)
4358 /* Make sure value gets expanded once before the
4359 loop. */
4360 expand_expr (value, const0_rtx, VOIDmode, 0);
4361 emit_queue ();
4363 store_expr (lo_index, index_r, 0);
4364 loop = expand_start_loop (0);
4366 /* Assign value to element index. */
4367 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4368 size_int (BITS_PER_UNIT));
4369 position = size_binop (MULT_EXPR,
4370 size_binop (MINUS_EXPR, index,
4371 TYPE_MIN_VALUE (domain)),
4372 position);
4373 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4374 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4375 xtarget = change_address (target, mode, addr);
4376 if (TREE_CODE (value) == CONSTRUCTOR)
4377 store_constructor (value, xtarget, align, cleared);
4378 else
4379 store_expr (value, xtarget, 0);
4381 expand_exit_loop_if_false (loop,
4382 build (LT_EXPR, integer_type_node,
4383 index, hi_index));
4385 expand_increment (build (PREINCREMENT_EXPR,
4386 TREE_TYPE (index),
4387 index, integer_one_node), 0, 0);
4388 expand_end_loop ();
4389 emit_label (loop_end);
4391 /* Needed by stupid register allocation, to extend the
4392 lifetime of pseudo-regs used by target past the end
4393 of the loop. */
4394 emit_insn (gen_rtx_USE (GET_MODE (target), target));
4397 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
4398 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
4400 rtx pos_rtx, addr;
4401 tree position;
4403 if (index == 0)
4404 index = size_int (i);
4406 if (minelt)
4407 index = size_binop (MINUS_EXPR, index,
4408 TYPE_MIN_VALUE (domain));
4409 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4410 size_int (BITS_PER_UNIT));
4411 position = size_binop (MULT_EXPR, index, position);
4412 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4413 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4414 xtarget = change_address (target, mode, addr);
4415 store_expr (value, xtarget, 0);
4417 else
4419 if (index != 0)
4420 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4421 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4422 else
4423 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4424 store_constructor_field (target, bitsize, bitpos, mode, value,
4425 type, align, cleared);
4429 /* Set constructor assignments. */
4430 else if (TREE_CODE (type) == SET_TYPE)
4432 tree elt = CONSTRUCTOR_ELTS (exp);
4433 int nbytes = int_size_in_bytes (type), nbits;
4434 tree domain = TYPE_DOMAIN (type);
4435 tree domain_min, domain_max, bitlength;
4437 /* The default implementation strategy is to extract the constant
4438 parts of the constructor, use that to initialize the target,
4439 and then "or" in whatever non-constant ranges we need in addition.
4441 If a large set is all zero or all ones, it is
4442 probably better to set it using memset (if available) or bzero.
4443 Also, if a large set has just a single range, it may also be
4444 better to first clear the whole set (using
4445 bzero/memset), and then set the bits we want. */
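/* A sketch of how this plays out (illustrative assumptions, not from the
   original comment): for a powerset over 0..63 whose constructor is empty,
   the check just below clears the 8 bytes and returns; for one containing
   a few constant members, the word-building loop further down assembles
   each set word with the corresponding bits set and stores it; a
   non-constant range instead falls through to the __setbits library call
   (or to memset when the range is byte-aligned).  */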
4447 /* Check for all zeros. */
4448 if (elt == NULL_TREE)
4450 if (!cleared)
4451 clear_storage (target, expr_size (exp),
4452 TYPE_ALIGN (type) / BITS_PER_UNIT);
4453 return;
4456 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4457 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4458 bitlength = size_binop (PLUS_EXPR,
4459 size_binop (MINUS_EXPR, domain_max, domain_min),
4460 size_one_node);
4462 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4463 abort ();
4464 nbits = TREE_INT_CST_LOW (bitlength);
4466 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4467 are "complicated" (more than one range), initialize (the
4468 constant parts) by copying from a constant. */
4469 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4470 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4472 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4473 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4474 char *bit_buffer = (char *) alloca (nbits);
4475 HOST_WIDE_INT word = 0;
4476 int bit_pos = 0;
4477 int ibit = 0;
4478 int offset = 0; /* In bytes from beginning of set. */
4479 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4480 for (;;)
4482 if (bit_buffer[ibit])
4484 if (BYTES_BIG_ENDIAN)
4485 word |= (1 << (set_word_size - 1 - bit_pos));
4486 else
4487 word |= 1 << bit_pos;
4489 bit_pos++; ibit++;
4490 if (bit_pos >= set_word_size || ibit == nbits)
4492 if (word != 0 || ! cleared)
4494 rtx datum = GEN_INT (word);
4495 rtx to_rtx;
4496 /* The assumption here is that it is safe to use
4497 XEXP if the set is multi-word, but not if
4498 it's single-word. */
4499 if (GET_CODE (target) == MEM)
4501 to_rtx = plus_constant (XEXP (target, 0), offset);
4502 to_rtx = change_address (target, mode, to_rtx);
4504 else if (offset == 0)
4505 to_rtx = target;
4506 else
4507 abort ();
4508 emit_move_insn (to_rtx, datum);
4510 if (ibit == nbits)
4511 break;
4512 word = 0;
4513 bit_pos = 0;
4514 offset += set_word_size / BITS_PER_UNIT;
4518 else if (!cleared)
4520 /* Don't bother clearing storage if the set is all ones. */
4521 if (TREE_CHAIN (elt) != NULL_TREE
4522 || (TREE_PURPOSE (elt) == NULL_TREE
4523 ? nbits != 1
4524 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4525 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4526 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4527 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4528 != nbits))))
4529 clear_storage (target, expr_size (exp),
4530 TYPE_ALIGN (type) / BITS_PER_UNIT);
4533 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4535 /* Start of range of element, or NULL. */
4536 tree startbit = TREE_PURPOSE (elt);
4537 /* End of range of element, or element value. */
4538 tree endbit = TREE_VALUE (elt);
4539 #ifdef TARGET_MEM_FUNCTIONS
4540 HOST_WIDE_INT startb, endb;
4541 #endif
4542 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4544 bitlength_rtx = expand_expr (bitlength,
4545 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4547 /* Handle a non-range tuple element like [ expr ]. */
4548 if (startbit == NULL_TREE)
4550 startbit = save_expr (endbit);
4551 endbit = startbit;
4553 startbit = convert (sizetype, startbit);
4554 endbit = convert (sizetype, endbit);
4555 if (! integer_zerop (domain_min))
4557 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4558 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4560 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4561 EXPAND_CONST_ADDRESS);
4562 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4563 EXPAND_CONST_ADDRESS);
4565 if (REG_P (target))
4567 targetx = assign_stack_temp (GET_MODE (target),
4568 GET_MODE_SIZE (GET_MODE (target)),
4570 emit_move_insn (targetx, target);
4572 else if (GET_CODE (target) == MEM)
4573 targetx = target;
4574 else
4575 abort ();
4577 #ifdef TARGET_MEM_FUNCTIONS
4578 /* Optimization: If startbit and endbit are
4579 constants divisible by BITS_PER_UNIT,
4580 call memset instead. */
4581 if (TREE_CODE (startbit) == INTEGER_CST
4582 && TREE_CODE (endbit) == INTEGER_CST
4583 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4584 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4586 emit_library_call (memset_libfunc, 0,
4587 VOIDmode, 3,
4588 plus_constant (XEXP (targetx, 0),
4589 startb / BITS_PER_UNIT),
4590 Pmode,
4591 constm1_rtx, TYPE_MODE (integer_type_node),
4592 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4593 TYPE_MODE (sizetype));
4595 else
4596 #endif
4598 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4599 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4600 bitlength_rtx, TYPE_MODE (sizetype),
4601 startbit_rtx, TYPE_MODE (sizetype),
4602 endbit_rtx, TYPE_MODE (sizetype));
4604 if (REG_P (target))
4605 emit_move_insn (target, targetx);
4609 else
4610 abort ();
4613 /* Store the value of EXP (an expression tree)
4614 into a subfield of TARGET which has mode MODE and occupies
4615 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4616 If MODE is VOIDmode, it means that we are storing into a bit-field.
4618 If VALUE_MODE is VOIDmode, return nothing in particular.
4619 UNSIGNEDP is not used in this case.
4621 Otherwise, return an rtx for the value stored. This rtx
4622 has mode VALUE_MODE if that is convenient to do.
4623 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4625 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4626 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4628 ALIAS_SET is the alias set for the destination. This value will
4629 (in general) be different from that for TARGET, since TARGET is a
4630 reference to the containing structure. */
4632 static rtx
4633 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4634 unsignedp, align, total_size, alias_set)
4635 rtx target;
4636 int bitsize, bitpos;
4637 enum machine_mode mode;
4638 tree exp;
4639 enum machine_mode value_mode;
4640 int unsignedp;
4641 int align;
4642 int total_size;
4643 int alias_set;
4645 HOST_WIDE_INT width_mask = 0;
4647 if (TREE_CODE (exp) == ERROR_MARK)
4648 return const0_rtx;
4650 if (bitsize < HOST_BITS_PER_WIDE_INT)
4651 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4653 /* If we are storing into an unaligned field of an aligned union that is
4654 in a register, we may have the mode of TARGET being an integer mode but
4655 MODE == BLKmode. In that case, get an aligned object whose size and
4656 alignment are the same as TARGET and store TARGET into it (we can avoid
4657 the store if the field being stored is the entire width of TARGET). Then
4658 call ourselves recursively to store the field into a BLKmode version of
4659 that object. Finally, load from the object into TARGET. This is not
4660 very efficient in general, but should only be slightly more expensive
4661 than the otherwise-required unaligned accesses. Perhaps this can be
4662 cleaned up later. */
4664 if (mode == BLKmode
4665 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4667 rtx object = assign_stack_temp (GET_MODE (target),
4668 GET_MODE_SIZE (GET_MODE (target)), 0);
4669 rtx blk_object = copy_rtx (object);
4671 MEM_SET_IN_STRUCT_P (object, 1);
4672 MEM_SET_IN_STRUCT_P (blk_object, 1);
4673 PUT_MODE (blk_object, BLKmode);
4675 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4676 emit_move_insn (object, target);
4678 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4679 align, total_size, alias_set);
4681 /* Even though we aren't returning target, we need to
4682 give it the updated value. */
4683 emit_move_insn (target, object);
4685 return blk_object;
4688 /* If the structure is in a register or if the component
4689 is a bit field, we cannot use addressing to access it.
4690 Use bit-field techniques or SUBREG to store in it. */
4692 if (mode == VOIDmode
4693 || (mode != BLKmode && ! direct_store[(int) mode]
4694 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4695 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4696 || GET_CODE (target) == REG
4697 || GET_CODE (target) == SUBREG
4698 /* If the field isn't aligned enough to store as an ordinary memref,
4699 store it as a bit field. */
4700 || (SLOW_UNALIGNED_ACCESS
4701 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4702 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4704 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4706 /* If BITSIZE is narrower than the size of the type of EXP
4707 we will be narrowing TEMP. Normally, what's wanted are the
4708 low-order bits. However, if EXP's type is a record and this is a
4709 big-endian machine, we want the upper BITSIZE bits. */
4710 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4711 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4712 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4713 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4714 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4715 - bitsize),
4716 temp, 1);
4718 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4719 MODE. */
4720 if (mode != VOIDmode && mode != BLKmode
4721 && mode != TYPE_MODE (TREE_TYPE (exp)))
4722 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4724 /* If the modes of TARGET and TEMP are both BLKmode, both
4725 must be in memory and BITPOS must be aligned on a byte
4726 boundary. If so, we simply do a block copy. */
4727 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4729 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4730 || bitpos % BITS_PER_UNIT != 0)
4731 abort ();
4733 target = change_address (target, VOIDmode,
4734 plus_constant (XEXP (target, 0),
4735 bitpos / BITS_PER_UNIT));
4737 emit_block_move (target, temp,
4738 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4739 / BITS_PER_UNIT),
4742 return value_mode == VOIDmode ? const0_rtx : target;
4745 /* Store the value in the bitfield. */
4746 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4747 if (value_mode != VOIDmode)
4749 /* The caller wants an rtx for the value. */
4750 /* If possible, avoid refetching from the bitfield itself. */
4751 if (width_mask != 0
4752 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4754 tree count;
4755 enum machine_mode tmode;
4757 if (unsignedp)
4758 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4759 tmode = GET_MODE (temp);
4760 if (tmode == VOIDmode)
4761 tmode = value_mode;
4762 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4763 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4764 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4766 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4767 NULL_RTX, value_mode, 0, align,
4768 total_size);
4770 return const0_rtx;
4772 else
4774 rtx addr = XEXP (target, 0);
4775 rtx to_rtx;
4777 /* If a value is wanted, it must be the lhs;
4778 so make the address stable for multiple use. */
4780 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4781 && ! CONSTANT_ADDRESS_P (addr)
4782 /* A frame-pointer reference is already stable. */
4783 && ! (GET_CODE (addr) == PLUS
4784 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4785 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4786 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4787 addr = copy_to_reg (addr);
4789 /* Now build a reference to just the desired component. */
4791 to_rtx = copy_rtx (change_address (target, mode,
4792 plus_constant (addr,
4793 (bitpos
4794 / BITS_PER_UNIT))));
4795 MEM_SET_IN_STRUCT_P (to_rtx, 1);
4796 MEM_ALIAS_SET (to_rtx) = alias_set;
4798 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4802 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4803 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4804 ARRAY_REFs and find the ultimate containing object, which we return.
4806 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4807 bit position, and *PUNSIGNEDP to the signedness of the field.
4808 If the position of the field is variable, we store a tree
4809 giving the variable offset (in units) in *POFFSET.
4810 This offset is in addition to the bit position.
4811 If the position is not variable, we store 0 in *POFFSET.
4812 We set *PALIGNMENT to the alignment in bytes of the address that will be
4813 computed. This is the alignment of the thing we return if *POFFSET
4814 is zero, but can be less strictly aligned if *POFFSET is nonzero.
4816 If any of the extraction expressions is volatile,
4817 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4819 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4820 is a mode that can be used to access the field. In that case, *PBITSIZE
4821 is redundant.
4823 If the field describes a variable-sized object, *PMODE is set to
4824 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4825 this case, but the address of the object can be found. */
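/* A hypothetical call (an illustration added here, not part of the
   original description): for the reference `s.f[i].g' this function
   returns the tree for `s'; *PBITPOS accumulates the constant bit offsets
   of `f' and `g', *POFFSET holds a tree for the variable part
   `i * sizeof (element)' measured in units, and *PMODE, *PBITSIZE and
   *PUNSIGNEDP describe the innermost field `g'.  */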
4827 tree
4828 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4829 punsignedp, pvolatilep, palignment)
4830 tree exp;
4831 int *pbitsize;
4832 int *pbitpos;
4833 tree *poffset;
4834 enum machine_mode *pmode;
4835 int *punsignedp;
4836 int *pvolatilep;
4837 int *palignment;
4839 tree orig_exp = exp;
4840 tree size_tree = 0;
4841 enum machine_mode mode = VOIDmode;
4842 tree offset = integer_zero_node;
4843 unsigned int alignment = BIGGEST_ALIGNMENT;
4845 if (TREE_CODE (exp) == COMPONENT_REF)
4847 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4848 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4849 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4850 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4852 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4854 size_tree = TREE_OPERAND (exp, 1);
4855 *punsignedp = TREE_UNSIGNED (exp);
4857 else
4859 mode = TYPE_MODE (TREE_TYPE (exp));
4860 if (mode == BLKmode)
4861 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4863 *pbitsize = GET_MODE_BITSIZE (mode);
4864 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4867 if (size_tree)
4869 if (TREE_CODE (size_tree) != INTEGER_CST)
4870 mode = BLKmode, *pbitsize = -1;
4871 else
4872 *pbitsize = TREE_INT_CST_LOW (size_tree);
4875 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4876 and find the ultimate containing object. */
4878 *pbitpos = 0;
4880 while (1)
4882 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4884 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4885 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4886 : TREE_OPERAND (exp, 2));
4887 tree constant = integer_zero_node, var = pos;
4889 /* If this field hasn't been filled in yet, don't go
4890 past it. This should only happen when folding expressions
4891 made during type construction. */
4892 if (pos == 0)
4893 break;
4895 /* Assume here that the offset is a multiple of a unit.
4896 If not, there should be an explicitly added constant. */
4897 if (TREE_CODE (pos) == PLUS_EXPR
4898 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4899 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4900 else if (TREE_CODE (pos) == INTEGER_CST)
4901 constant = pos, var = integer_zero_node;
4903 *pbitpos += TREE_INT_CST_LOW (constant);
4904 offset = size_binop (PLUS_EXPR, offset,
4905 size_binop (EXACT_DIV_EXPR, var,
4906 size_int (BITS_PER_UNIT)));
4909 else if (TREE_CODE (exp) == ARRAY_REF)
4911 /* This code is based on the code in case ARRAY_REF in expand_expr
4912 below. We assume here that the size of an array element is
4913 always an integral multiple of BITS_PER_UNIT. */
4915 tree index = TREE_OPERAND (exp, 1);
4916 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4917 tree low_bound
4918 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4919 tree index_type = TREE_TYPE (index);
4920 tree xindex;
4922 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4924 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4925 index);
4926 index_type = TREE_TYPE (index);
4929 /* Optimize the special-case of a zero lower bound.
4931 We convert the low_bound to sizetype to avoid some problems
4932 with constant folding. (E.g. suppose the lower bound is 1,
4933 and its mode is QI. Without the conversion, (ARRAY
4934 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4935 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4937 But sizetype isn't quite right either (especially if
4938 the lowbound is negative). FIXME */
4940 if (! integer_zerop (low_bound))
4941 index = fold (build (MINUS_EXPR, index_type, index,
4942 convert (sizetype, low_bound)));
4944 if (TREE_CODE (index) == INTEGER_CST)
4946 index = convert (sbitsizetype, index);
4947 index_type = TREE_TYPE (index);
4950 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4951 convert (sbitsizetype,
4952 TYPE_SIZE (TREE_TYPE (exp)))));
4954 if (TREE_CODE (xindex) == INTEGER_CST
4955 && TREE_INT_CST_HIGH (xindex) == 0)
4956 *pbitpos += TREE_INT_CST_LOW (xindex);
4957 else
4959 /* Either the bit offset calculated above is not constant, or
4960 it overflowed. In either case, redo the multiplication
4961 against the size in units. This is especially important
4962 in the non-constant case to avoid a division at runtime. */
4963 xindex = fold (build (MULT_EXPR, ssizetype, index,
4964 convert (ssizetype,
4965 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4967 if (contains_placeholder_p (xindex))
4968 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4970 offset = size_binop (PLUS_EXPR, offset, xindex);
4973 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4974 && ! ((TREE_CODE (exp) == NOP_EXPR
4975 || TREE_CODE (exp) == CONVERT_EXPR)
4976 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4977 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4978 != UNION_TYPE))
4979 && (TYPE_MODE (TREE_TYPE (exp))
4980 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4981 break;
4983 /* If any reference in the chain is volatile, the effect is volatile. */
4984 if (TREE_THIS_VOLATILE (exp))
4985 *pvolatilep = 1;
4987 /* If the offset is non-constant already, then we can't assume any
4988 alignment more than the alignment here. */
4989 if (! integer_zerop (offset))
4990 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4992 exp = TREE_OPERAND (exp, 0);
4995 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4996 alignment = MIN (alignment, DECL_ALIGN (exp));
4997 else if (TREE_TYPE (exp) != 0)
4998 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5000 if (integer_zerop (offset))
5001 offset = 0;
5003 if (offset != 0 && contains_placeholder_p (offset))
5004 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5006 *pmode = mode;
5007 *poffset = offset;
5008 *palignment = alignment / BITS_PER_UNIT;
5009 return exp;
5012 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5013 static enum memory_use_mode
5014 get_memory_usage_from_modifier (modifier)
5015 enum expand_modifier modifier;
5017 switch (modifier)
5019 case EXPAND_NORMAL:
5020 case EXPAND_SUM:
5021 return MEMORY_USE_RO;
5022 break;
5023 case EXPAND_MEMORY_USE_WO:
5024 return MEMORY_USE_WO;
5025 break;
5026 case EXPAND_MEMORY_USE_RW:
5027 return MEMORY_USE_RW;
5028 break;
5029 case EXPAND_MEMORY_USE_DONT:
5030 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5031 MEMORY_USE_DONT, because they are modifiers to a call of
5032 expand_expr in the ADDR_EXPR case of expand_expr. */
5033 case EXPAND_CONST_ADDRESS:
5034 case EXPAND_INITIALIZER:
5035 return MEMORY_USE_DONT;
5036 case EXPAND_MEMORY_USE_BAD:
5037 default:
5038 abort ();
5042 /* Given an rtx VALUE that may contain additions and multiplications,
5043 return an equivalent value that just refers to a register or memory.
5044 This is done by generating instructions to perform the arithmetic
5045 and returning a pseudo-register containing the value.
5047 The returned value may be a REG, SUBREG, MEM or constant. */
5050 force_operand (value, target)
5051 rtx value, target;
5053 register optab binoptab = 0;
5054 /* Use a temporary to force order of execution of calls to
5055 `force_operand'. */
5056 rtx tmp;
5057 register rtx op2;
5058 /* Use subtarget as the target for operand 0 of a binary operation. */
5059 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5061 /* Check for a PIC address load. */
5062 if (flag_pic
5063 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5064 && XEXP (value, 0) == pic_offset_table_rtx
5065 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5066 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5067 || GET_CODE (XEXP (value, 1)) == CONST))
5069 if (!subtarget)
5070 subtarget = gen_reg_rtx (GET_MODE (value));
5071 emit_move_insn (subtarget, value);
5072 return subtarget;
5075 if (GET_CODE (value) == PLUS)
5076 binoptab = add_optab;
5077 else if (GET_CODE (value) == MINUS)
5078 binoptab = sub_optab;
5079 else if (GET_CODE (value) == MULT)
5081 op2 = XEXP (value, 1);
5082 if (!CONSTANT_P (op2)
5083 && !(GET_CODE (op2) == REG && op2 != subtarget))
5084 subtarget = 0;
5085 tmp = force_operand (XEXP (value, 0), subtarget);
5086 return expand_mult (GET_MODE (value), tmp,
5087 force_operand (op2, NULL_RTX),
5088 target, 0);
5091 if (binoptab)
5093 op2 = XEXP (value, 1);
5094 if (!CONSTANT_P (op2)
5095 && !(GET_CODE (op2) == REG && op2 != subtarget))
5096 subtarget = 0;
5097 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5099 binoptab = add_optab;
5100 op2 = negate_rtx (GET_MODE (value), op2);
5103 /* Check for an addition with OP2 a constant integer and our first
5104 operand a PLUS of a virtual register and something else. In that
5105 case, we want to emit the sum of the virtual register and the
5106 constant first and then add the other value. This allows virtual
5107 register instantiation to simply modify the constant rather than
5108 creating another one around this addition. */
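/* For instance (an illustrative rtx, not from the original text), given

	(plus (plus (reg virtual-stack-vars) (reg R)) (const_int 4))

   we first form virtual-stack-vars + 4 -- which instantiation can later
   fold into a single frame-pointer offset -- and only then add R, instead
   of leaving a separate constant addition behind.  */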
5109 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5110 && GET_CODE (XEXP (value, 0)) == PLUS
5111 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5112 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5113 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5115 rtx temp = expand_binop (GET_MODE (value), binoptab,
5116 XEXP (XEXP (value, 0), 0), op2,
5117 subtarget, 0, OPTAB_LIB_WIDEN);
5118 return expand_binop (GET_MODE (value), binoptab, temp,
5119 force_operand (XEXP (XEXP (value, 0), 1), 0),
5120 target, 0, OPTAB_LIB_WIDEN);
5123 tmp = force_operand (XEXP (value, 0), subtarget);
5124 return expand_binop (GET_MODE (value), binoptab, tmp,
5125 force_operand (op2, NULL_RTX),
5126 target, 0, OPTAB_LIB_WIDEN);
5127 /* We give UNSIGNEDP = 0 to expand_binop
5128 because the only operations we are expanding here are signed ones. */
5130 return value;
5133 /* Subroutine of expand_expr:
5134 save the non-copied parts (LIST) of an expr (LHS), and return a list
5135 which can restore these values to their previous values,
5136 should something modify their storage. */
5138 static tree
5139 save_noncopied_parts (lhs, list)
5140 tree lhs;
5141 tree list;
5143 tree tail;
5144 tree parts = 0;
5146 for (tail = list; tail; tail = TREE_CHAIN (tail))
5147 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5148 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5149 else
5151 tree part = TREE_VALUE (tail);
5152 tree part_type = TREE_TYPE (part);
5153 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5154 rtx target = assign_temp (part_type, 0, 1, 1);
5155 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5156 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5157 parts = tree_cons (to_be_saved,
5158 build (RTL_EXPR, part_type, NULL_TREE,
5159 (tree) target),
5160 parts);
5161 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5163 return parts;
5166 /* Subroutine of expand_expr:
5167 record the non-copied parts (LIST) of an expr (LHS), and return a list
5168 which specifies the initial values of these parts. */
5170 static tree
5171 init_noncopied_parts (lhs, list)
5172 tree lhs;
5173 tree list;
5175 tree tail;
5176 tree parts = 0;
5178 for (tail = list; tail; tail = TREE_CHAIN (tail))
5179 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5180 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5181 else if (TREE_PURPOSE (tail))
5183 tree part = TREE_VALUE (tail);
5184 tree part_type = TREE_TYPE (part);
5185 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5186 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5188 return parts;
5191 /* Subroutine of expand_expr: return nonzero iff there is no way that
5192 EXP can reference X, which is being modified. TOP_P is nonzero if this
5193 call is going to be used to determine whether we need a temporary
5194 for EXP, as opposed to a recursive call to this function.
5196 It is always safe for this routine to return zero since it merely
5197 searches for optimization opportunities. */
5199 static int
5200 safe_from_p (x, exp, top_p)
5201 rtx x;
5202 tree exp;
5203 int top_p;
5205 rtx exp_rtl = 0;
5206 int i, nops;
5207 static int save_expr_count;
5208 static int save_expr_size = 0;
5209 static tree *save_expr_rewritten;
5210 static tree save_expr_trees[256];
5212 if (x == 0
5213 /* If EXP has varying size, we MUST use a target since we currently
5214 have no way of allocating temporaries of variable size
5215 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5216 So we assume here that something at a higher level has prevented a
5217 clash. This is somewhat bogus, but the best we can do. Only
5218 do this when X is BLKmode and when we are at the top level. */
5219 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5220 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5221 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5222 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5223 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5224 != INTEGER_CST)
5225 && GET_MODE (x) == BLKmode))
5226 return 1;
5228 if (top_p && save_expr_size == 0)
5230 int rtn;
5232 save_expr_count = 0;
5233 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5234 save_expr_rewritten = &save_expr_trees[0];
5236 rtn = safe_from_p (x, exp, 1);
5238 for (i = 0; i < save_expr_count; ++i)
5240 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5241 abort ();
5242 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5245 save_expr_size = 0;
5247 return rtn;
5250 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5251 find the underlying pseudo. */
5252 if (GET_CODE (x) == SUBREG)
5254 x = SUBREG_REG (x);
5255 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5256 return 0;
5259 /* If X is a location in the outgoing argument area, it is always safe. */
5260 if (GET_CODE (x) == MEM
5261 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5262 || (GET_CODE (XEXP (x, 0)) == PLUS
5263 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5264 return 1;
5266 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5268 case 'd':
5269 exp_rtl = DECL_RTL (exp);
5270 break;
5272 case 'c':
5273 return 1;
5275 case 'x':
5276 if (TREE_CODE (exp) == TREE_LIST)
5277 return ((TREE_VALUE (exp) == 0
5278 || safe_from_p (x, TREE_VALUE (exp), 0))
5279 && (TREE_CHAIN (exp) == 0
5280 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5281 else if (TREE_CODE (exp) == ERROR_MARK)
5282 return 1; /* An already-visited SAVE_EXPR? */
5283 else
5284 return 0;
5286 case '1':
5287 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5289 case '2':
5290 case '<':
5291 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5292 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5294 case 'e':
5295 case 'r':
5296 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5297 the expression. If it is set, we conflict iff we are that rtx or
5298 both are in memory. Otherwise, we check all operands of the
5299 expression recursively. */
5301 switch (TREE_CODE (exp))
5303 case ADDR_EXPR:
5304 return (staticp (TREE_OPERAND (exp, 0))
5305 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5306 || TREE_STATIC (exp));
5308 case INDIRECT_REF:
5309 if (GET_CODE (x) == MEM)
5310 return 0;
5311 break;
5313 case CALL_EXPR:
5314 exp_rtl = CALL_EXPR_RTL (exp);
5315 if (exp_rtl == 0)
5317 /* Assume that the call will clobber all hard registers and
5318 all of memory. */
5319 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5320 || GET_CODE (x) == MEM)
5321 return 0;
5324 break;
5326 case RTL_EXPR:
5327 /* If a sequence exists, we would have to scan every instruction
5328 in the sequence to see if it was safe. This is probably not
5329 worthwhile. */
5330 if (RTL_EXPR_SEQUENCE (exp))
5331 return 0;
5333 exp_rtl = RTL_EXPR_RTL (exp);
5334 break;
5336 case WITH_CLEANUP_EXPR:
5337 exp_rtl = RTL_EXPR_RTL (exp);
5338 break;
5340 case CLEANUP_POINT_EXPR:
5341 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5343 case SAVE_EXPR:
5344 exp_rtl = SAVE_EXPR_RTL (exp);
5345 if (exp_rtl)
5346 break;
5348 /* This SAVE_EXPR might appear many times in the top-level
5349 safe_from_p() expression, and if it has a complex
5350 subexpression, examining it multiple times could result
5351 in a combinatorial explosion. E.g. on an Alpha
5352 running at least 200MHz, a Fortran test case compiled with
5353 optimization took about 28 minutes to compile -- even though
5354 it was only a few lines long, and the complicated line causing
5355 so much time to be spent in the earlier version of safe_from_p()
5356 had only 293 or so unique nodes.
5358 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5359 where it is so we can turn it back in the top-level safe_from_p()
5360 when we're done. */
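/* Sketch of the effect (an added illustration, not original text): the
   first visit to a shared SAVE_EXPR scans its operand once and then
   rewrites the node to ERROR_MARK; any later visit through another
   reference hits the ERROR_MARK case above and returns 1 immediately, so
   the walk stays roughly linear in the number of distinct nodes instead
   of exploding with the amount of sharing.  */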
5362 /* For now, don't bother re-sizing the array. */
5363 if (save_expr_count >= save_expr_size)
5364 return 0;
5365 save_expr_rewritten[save_expr_count++] = exp;
5367 nops = tree_code_length[(int) SAVE_EXPR];
5368 for (i = 0; i < nops; i++)
5370 tree operand = TREE_OPERAND (exp, i);
5371 if (operand == NULL_TREE)
5372 continue;
5373 TREE_SET_CODE (exp, ERROR_MARK);
5374 if (!safe_from_p (x, operand, 0))
5375 return 0;
5376 TREE_SET_CODE (exp, SAVE_EXPR);
5378 TREE_SET_CODE (exp, ERROR_MARK);
5379 return 1;
5381 case BIND_EXPR:
5382 /* The only operand we look at is operand 1. The rest aren't
5383 part of the expression. */
5384 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5386 case METHOD_CALL_EXPR:
5387 /* This takes an rtx argument, but shouldn't appear here. */
5388 abort ();
5390 default:
5391 break;
5394 /* If we have an rtx, we do not need to scan our operands. */
5395 if (exp_rtl)
5396 break;
5398 nops = tree_code_length[(int) TREE_CODE (exp)];
5399 for (i = 0; i < nops; i++)
5400 if (TREE_OPERAND (exp, i) != 0
5401 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5402 return 0;
5405 /* If we have an rtl, find any enclosed object. Then see if we conflict
5406 with it. */
5407 if (exp_rtl)
5409 if (GET_CODE (exp_rtl) == SUBREG)
5411 exp_rtl = SUBREG_REG (exp_rtl);
5412 if (GET_CODE (exp_rtl) == REG
5413 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5414 return 0;
5417 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5418 are memory and EXP is not readonly. */
5419 return ! (rtx_equal_p (x, exp_rtl)
5420 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5421 && ! TREE_READONLY (exp)));
5424 /* If we reach here, it is safe. */
5425 return 1;
5428 /* Subroutine of expand_expr: return nonzero iff EXP is an
5429 expression whose type is statically determinable. */
5431 static int
5432 fixed_type_p (exp)
5433 tree exp;
5435 if (TREE_CODE (exp) == PARM_DECL
5436 || TREE_CODE (exp) == VAR_DECL
5437 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5438 || TREE_CODE (exp) == COMPONENT_REF
5439 || TREE_CODE (exp) == ARRAY_REF)
5440 return 1;
5441 return 0;
5444 /* Subroutine of expand_expr: return rtx if EXP is a
5445 variable or parameter; else return 0. */
5447 static rtx
5448 var_rtx (exp)
5449 tree exp;
5451 STRIP_NOPS (exp);
5452 switch (TREE_CODE (exp))
5454 case PARM_DECL:
5455 case VAR_DECL:
5456 return DECL_RTL (exp);
5457 default:
5458 return 0;
5462 #ifdef MAX_INTEGER_COMPUTATION_MODE
5463 void
5464 check_max_integer_computation_mode (exp)
5465 tree exp;
5467 enum tree_code code;
5468 enum machine_mode mode;
5470 /* Strip any NOPs that don't change the mode. */
5471 STRIP_NOPS (exp);
5472 code = TREE_CODE (exp);
5474 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5475 if (code == NOP_EXPR
5476 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5477 return;
5479 /* First check the type of the overall operation. We need only look at
5480 unary, binary and relational operations. */
5481 if (TREE_CODE_CLASS (code) == '1'
5482 || TREE_CODE_CLASS (code) == '2'
5483 || TREE_CODE_CLASS (code) == '<')
5485 mode = TYPE_MODE (TREE_TYPE (exp));
5486 if (GET_MODE_CLASS (mode) == MODE_INT
5487 && mode > MAX_INTEGER_COMPUTATION_MODE)
5488 fatal ("unsupported wide integer operation");
5491 /* Check operand of a unary op. */
5492 if (TREE_CODE_CLASS (code) == '1')
5494 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5495 if (GET_MODE_CLASS (mode) == MODE_INT
5496 && mode > MAX_INTEGER_COMPUTATION_MODE)
5497 fatal ("unsupported wide integer operation");
5500 /* Check operands of a binary/comparison op. */
5501 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5503 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5504 if (GET_MODE_CLASS (mode) == MODE_INT
5505 && mode > MAX_INTEGER_COMPUTATION_MODE)
5506 fatal ("unsupported wide integer operation");
5508 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5509 if (GET_MODE_CLASS (mode) == MODE_INT
5510 && mode > MAX_INTEGER_COMPUTATION_MODE)
5511 fatal ("unsupported wide integer operation");
5514 #endif
5517 /* expand_expr: generate code for computing expression EXP.
5518 An rtx for the computed value is returned. The value is never null.
5519 In the case of a void EXP, const0_rtx is returned.
5521 The value may be stored in TARGET if TARGET is nonzero.
5522 TARGET is just a suggestion; callers must assume that
5523 the rtx returned may not be the same as TARGET.
5525 If TARGET is CONST0_RTX, it means that the value will be ignored.
5527 If TMODE is not VOIDmode, it suggests generating the
5528 result in mode TMODE. But this is done only when convenient.
5529 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5530 TMODE is just a suggestion; callers must assume that
5531 the rtx returned may not have mode TMODE.
5533 Note that TARGET may have neither TMODE nor MODE. In that case, it
5534 probably will not be used.
5536 If MODIFIER is EXPAND_SUM then when EXP is an addition
5537 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5538 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5539 products as above, or REG or MEM, or constant.
5540 Ordinarily in such cases we would output mul or add instructions
5541 and then return a pseudo reg containing the sum.
5543 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5544 it also marks a label as absolutely required (it can't be dead).
5545 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5546 This is used for outputting expressions used in initializers.
5548 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5549 with a constant address even if that address is not normally legitimate.
5550 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
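/* For example (an illustrative case added here, not from the original
   comment), with MODIFIER == EXPAND_SUM an address computation such as
   `&a[i]' may be returned as

	(plus (symbol_ref a) (mult (reg i) (const_int 4)))

   assuming 4-byte elements, rather than being reduced to a pseudo
   register, so the caller can try to fold the whole sum into a machine
   addressing mode.  */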
5553 expand_expr (exp, target, tmode, modifier)
5554 register tree exp;
5555 rtx target;
5556 enum machine_mode tmode;
5557 enum expand_modifier modifier;
5559 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5560 This is static so it will be accessible to our recursive callees. */
5561 static tree placeholder_list = 0;
5562 register rtx op0, op1, temp;
5563 tree type = TREE_TYPE (exp);
5564 int unsignedp = TREE_UNSIGNED (type);
5565 register enum machine_mode mode;
5566 register enum tree_code code = TREE_CODE (exp);
5567 optab this_optab;
5568 rtx subtarget, original_target;
5569 int ignore;
5570 tree context;
5571 /* Used by check-memory-usage to make modifier read only. */
5572 enum expand_modifier ro_modifier;
5574 /* Handle ERROR_MARK before anybody tries to access its type. */
5575 if (TREE_CODE (exp) == ERROR_MARK)
5577 op0 = CONST0_RTX (tmode);
5578 if (op0 != 0)
5579 return op0;
5580 return const0_rtx;
5583 mode = TYPE_MODE (type);
5584 /* Use subtarget as the target for operand 0 of a binary operation. */
5585 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5586 original_target = target;
5587 ignore = (target == const0_rtx
5588 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5589 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5590 || code == COND_EXPR)
5591 && TREE_CODE (type) == VOID_TYPE));
5593 /* Make a read-only version of the modifier. */
5594 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5595 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5596 ro_modifier = modifier;
5597 else
5598 ro_modifier = EXPAND_NORMAL;
5600 /* Don't use hard regs as subtargets, because the combiner
5601 can only handle pseudo regs. */
5602 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5603 subtarget = 0;
5604 /* Avoid subtargets inside loops,
5605 since they hide some invariant expressions. */
5606 if (preserve_subexpressions_p ())
5607 subtarget = 0;
5609 /* If we are going to ignore this result, we need only do something
5610 if there is a side-effect somewhere in the expression. If there
5611 is, short-circuit the most common cases here. Note that we must
5612 not call expand_expr with anything but const0_rtx in case this
5613 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5615 if (ignore)
5617 if (! TREE_SIDE_EFFECTS (exp))
5618 return const0_rtx;
5620 /* Ensure we reference a volatile object even if value is ignored. */
5621 if (TREE_THIS_VOLATILE (exp)
5622 && TREE_CODE (exp) != FUNCTION_DECL
5623 && mode != VOIDmode && mode != BLKmode)
5625 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5626 if (GET_CODE (temp) == MEM)
5627 temp = copy_to_reg (temp);
5628 return const0_rtx;
5631 if (TREE_CODE_CLASS (code) == '1')
5632 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5633 VOIDmode, ro_modifier);
5634 else if (TREE_CODE_CLASS (code) == '2'
5635 || TREE_CODE_CLASS (code) == '<')
5637 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5638 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5639 return const0_rtx;
5641 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5642 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5643 /* If the second operand has no side effects, just evaluate
5644 the first. */
5645 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5646 VOIDmode, ro_modifier);
5648 target = 0;
5651 #ifdef MAX_INTEGER_COMPUTATION_MODE
5652 /* Only check stuff here if the mode we want is different from the mode
5653 of the expression; if it's the same, check_max_integer_computation_mode
5654 will handle it. Do we really need to check this stuff at all? */
5656 if (target
5657 && GET_MODE (target) != mode
5658 && TREE_CODE (exp) != INTEGER_CST
5659 && TREE_CODE (exp) != PARM_DECL
5660 && TREE_CODE (exp) != ARRAY_REF
5661 && TREE_CODE (exp) != COMPONENT_REF
5662 && TREE_CODE (exp) != BIT_FIELD_REF
5663 && TREE_CODE (exp) != INDIRECT_REF
5664 && TREE_CODE (exp) != CALL_EXPR
5665 && TREE_CODE (exp) != VAR_DECL
5666 && TREE_CODE (exp) != RTL_EXPR)
5668 enum machine_mode mode = GET_MODE (target);
5670 if (GET_MODE_CLASS (mode) == MODE_INT
5671 && mode > MAX_INTEGER_COMPUTATION_MODE)
5672 fatal ("unsupported wide integer operation");
5675 if (tmode != mode
5676 && TREE_CODE (exp) != INTEGER_CST
5677 && TREE_CODE (exp) != PARM_DECL
5678 && TREE_CODE (exp) != ARRAY_REF
5679 && TREE_CODE (exp) != COMPONENT_REF
5680 && TREE_CODE (exp) != BIT_FIELD_REF
5681 && TREE_CODE (exp) != INDIRECT_REF
5682 && TREE_CODE (exp) != VAR_DECL
5683 && TREE_CODE (exp) != CALL_EXPR
5684 && TREE_CODE (exp) != RTL_EXPR
5685 && GET_MODE_CLASS (tmode) == MODE_INT
5686 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5687 fatal ("unsupported wide integer operation");
5689 check_max_integer_computation_mode (exp);
5690 #endif
5692 /* If we will do cse, generate all results into pseudo registers
5693 since 1) that allows cse to find more things
5694 and 2) otherwise cse could produce an insn the machine
5695 cannot support. */
5697 if (! cse_not_expected && mode != BLKmode && target
5698 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5699 target = subtarget;
5701 switch (code)
5703 case LABEL_DECL:
5705 tree function = decl_function_context (exp);
5706 /* Handle using a label in a containing function. */
5707 if (function != current_function_decl
5708 && function != inline_function_decl && function != 0)
5710 struct function *p = find_function_data (function);
5711 /* Allocate in the memory associated with the function
5712 that the label is in. */
5713 push_obstacks (p->function_obstack,
5714 p->function_maybepermanent_obstack);
5716 p->expr->x_forced_labels
5717 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5718 p->expr->x_forced_labels);
5719 pop_obstacks ();
5721 else
5723 if (modifier == EXPAND_INITIALIZER)
5724 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5725 label_rtx (exp),
5726 forced_labels);
5729 temp = gen_rtx_MEM (FUNCTION_MODE,
5730 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5731 if (function != current_function_decl
5732 && function != inline_function_decl && function != 0)
5733 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5734 return temp;
5737 case PARM_DECL:
5738 if (DECL_RTL (exp) == 0)
5740 error_with_decl (exp, "prior parameter's size depends on `%s'");
5741 return CONST0_RTX (mode);
5744 /* ... fall through ... */
5746 case VAR_DECL:
5747 /* If a static var's type was incomplete when the decl was written,
5748 but the type is complete now, lay out the decl now. */
5749 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5750 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5752 push_obstacks_nochange ();
5753 end_temporary_allocation ();
5754 layout_decl (exp, 0);
5755 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5756 pop_obstacks ();
5759 /* Although static-storage variables start off initialized, according to
5760 ANSI C, a memcpy could overwrite them with uninitialized values. So
5761 we check them too. This also lets us check for read-only variables
5762 accessed via a non-const declaration, in case it won't be detected
5763 any other way (e.g., in an embedded system or OS kernel without
5764 memory protection).
5766 Aggregates are not checked here; they're handled elsewhere. */
5767 if (current_function && current_function_check_memory_usage
5768 && code == VAR_DECL
5769 && GET_CODE (DECL_RTL (exp)) == MEM
5770 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5772 enum memory_use_mode memory_usage;
5773 memory_usage = get_memory_usage_from_modifier (modifier);
5775 if (memory_usage != MEMORY_USE_DONT)
5776 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5777 XEXP (DECL_RTL (exp), 0), Pmode,
5778 GEN_INT (int_size_in_bytes (type)),
5779 TYPE_MODE (sizetype),
5780 GEN_INT (memory_usage),
5781 TYPE_MODE (integer_type_node));
5784 /* ... fall through ... */
5786 case FUNCTION_DECL:
5787 case RESULT_DECL:
5788 if (DECL_RTL (exp) == 0)
5789 abort ();
5791 /* Ensure the variable is marked as used even if it doesn't go through
5792 a parser. If it hasn't been used yet, write out an external
5793 definition. */
5794 if (! TREE_USED (exp))
5796 assemble_external (exp);
5797 TREE_USED (exp) = 1;
5800 /* Show we haven't gotten RTL for this yet. */
5801 temp = 0;
5803 /* Handle variables inherited from containing functions. */
5804 context = decl_function_context (exp);
5806 /* We treat inline_function_decl as an alias for the current function
5807 because that is the inline function whose vars, types, etc.
5808 are being merged into the current function.
5809 See expand_inline_function. */
5811 if (context != 0 && context != current_function_decl
5812 && context != inline_function_decl
5813 /* If var is static, we don't need a static chain to access it. */
5814 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5815 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5817 rtx addr;
5819 /* Mark as non-local and addressable. */
5820 DECL_NONLOCAL (exp) = 1;
5821 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5822 abort ();
5823 mark_addressable (exp);
5824 if (GET_CODE (DECL_RTL (exp)) != MEM)
5825 abort ();
5826 addr = XEXP (DECL_RTL (exp), 0);
5827 if (GET_CODE (addr) == MEM)
5828 addr = gen_rtx_MEM (Pmode,
5829 fix_lexical_addr (XEXP (addr, 0), exp));
5830 else
5831 addr = fix_lexical_addr (addr, exp);
5832 temp = change_address (DECL_RTL (exp), mode, addr);
5835 /* This is the case of an array whose size is to be determined
5836 from its initializer, while the initializer is still being parsed.
5837 See expand_decl. */
5839 else if (GET_CODE (DECL_RTL (exp)) == MEM
5840 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5841 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
5842 XEXP (DECL_RTL (exp), 0));
5844 /* If DECL_RTL is memory, we are in the normal case; if the address
5845 is not valid, or it is not a register and -fforce-addr is
5846 specified, get the address into a register. */
5848 else if (GET_CODE (DECL_RTL (exp)) == MEM
5849 && modifier != EXPAND_CONST_ADDRESS
5850 && modifier != EXPAND_SUM
5851 && modifier != EXPAND_INITIALIZER
5852 && (! memory_address_p (DECL_MODE (exp),
5853 XEXP (DECL_RTL (exp), 0))
5854 || (flag_force_addr
5855 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5856 temp = change_address (DECL_RTL (exp), VOIDmode,
5857 copy_rtx (XEXP (DECL_RTL (exp), 0)));
5859 /* If we got something, return it. But first, set the alignment
5860 if the address is a register. */
5861 if (temp != 0)
5863 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5864 mark_reg_pointer (XEXP (temp, 0),
5865 DECL_ALIGN (exp) / BITS_PER_UNIT);
5867 return temp;
5870 /* If the mode of DECL_RTL does not match that of the decl, it
5871 must be a promoted value. We return a SUBREG of the wanted mode,
5872 but mark it so that we know that it was already extended. */
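/* For example, a `short' variable that was promoted into an SImode pseudo
   register is returned here as (subreg:HI (reg:SI N) 0) with
   SUBREG_PROMOTED_VAR_P set, so callers can see that the value sitting in
   the wider register has already been sign- or zero-extended.  */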
5874 if (GET_CODE (DECL_RTL (exp)) == REG
5875 && GET_MODE (DECL_RTL (exp)) != mode)
5877 /* Get the signedness used for this variable. Ensure we get the
5878 same mode we got when the variable was declared. */
5879 if (GET_MODE (DECL_RTL (exp))
5880 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5881 abort ();
5883 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5884 SUBREG_PROMOTED_VAR_P (temp) = 1;
5885 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5886 return temp;
5889 return DECL_RTL (exp);
5891 case INTEGER_CST:
5892 return immed_double_const (TREE_INT_CST_LOW (exp),
5893 TREE_INT_CST_HIGH (exp),
5894 mode);
5896 case CONST_DECL:
5897 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5898 EXPAND_MEMORY_USE_BAD);
5900 case REAL_CST:
5901 /* If optimized, generate immediate CONST_DOUBLE
5902 which will be turned into memory by reload if necessary.
5904 We used to force a register so that loop.c could see it. But
5905 this does not allow gen_* patterns to perform optimizations with
5906 the constants. It also produces two insns in cases like "x = 1.0;".
5907 On most machines, floating-point constants are not permitted in
5908 many insns, so we'd end up copying it to a register in any case.
5910 Now, we do the copying in expand_binop, if appropriate. */
5911 return immed_real_const (exp);
5913 case COMPLEX_CST:
5914 case STRING_CST:
5915 if (! TREE_CST_RTL (exp))
5916 output_constant_def (exp);
5918 /* TREE_CST_RTL probably contains a constant address.
5919 On RISC machines where a constant address isn't valid,
5920 make some insns to get that address into a register. */
5921 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5922 && modifier != EXPAND_CONST_ADDRESS
5923 && modifier != EXPAND_INITIALIZER
5924 && modifier != EXPAND_SUM
5925 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5926 || (flag_force_addr
5927 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5928 return change_address (TREE_CST_RTL (exp), VOIDmode,
5929 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5930 return TREE_CST_RTL (exp);
5932 case EXPR_WITH_FILE_LOCATION:
5934 rtx to_return;
5935 char *saved_input_filename = input_filename;
5936 int saved_lineno = lineno;
5937 input_filename = EXPR_WFL_FILENAME (exp);
5938 lineno = EXPR_WFL_LINENO (exp);
5939 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5940 emit_line_note (input_filename, lineno);
5941 /* Possibly avoid switching back and forth here. */
5942 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5943 input_filename = saved_input_filename;
5944 lineno = saved_lineno;
5945 return to_return;
5948 case SAVE_EXPR:
5949 context = decl_function_context (exp);
5951 /* If this SAVE_EXPR was at global context, assume we are an
5952 initialization function and move it into our context. */
5953 if (context == 0)
5954 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5956 /* We treat inline_function_decl as an alias for the current function
5957 because that is the inline function whose vars, types, etc.
5958 are being merged into the current function.
5959 See expand_inline_function. */
5960 if (context == current_function_decl || context == inline_function_decl)
5961 context = 0;
5963 /* If this is non-local, handle it. */
5964 if (context)
5966 /* The following call just exists to abort if the context is
5967 not of a containing function. */
5968 find_function_data (context);
5970 temp = SAVE_EXPR_RTL (exp);
5971 if (temp && GET_CODE (temp) == REG)
5973 put_var_into_stack (exp);
5974 temp = SAVE_EXPR_RTL (exp);
5976 if (temp == 0 || GET_CODE (temp) != MEM)
5977 abort ();
5978 return change_address (temp, mode,
5979 fix_lexical_addr (XEXP (temp, 0), exp));
5981 if (SAVE_EXPR_RTL (exp) == 0)
5983 if (mode == VOIDmode)
5984 temp = const0_rtx;
5985 else
5986 temp = assign_temp (type, 3, 0, 0);
5988 SAVE_EXPR_RTL (exp) = temp;
5989 if (!optimize && GET_CODE (temp) == REG)
5990 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5991 save_expr_regs);
5993 /* If the mode of TEMP does not match that of the expression, it
5994 must be a promoted value. We pass store_expr a SUBREG of the
5995 wanted mode but mark it so that we know that it was already
5996 extended. Note that `unsignedp' was modified above in
5997 this case. */
5999 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6001 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6002 SUBREG_PROMOTED_VAR_P (temp) = 1;
6003 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6006 if (temp == const0_rtx)
6007 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6008 EXPAND_MEMORY_USE_BAD);
6009 else
6010 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6012 TREE_USED (exp) = 1;
6015 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6016 must be a promoted value. We return a SUBREG of the wanted mode,
6017 but mark it so that we know that it was already extended. */
6019 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6020 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6022 /* Compute the signedness and make the proper SUBREG. */
6023 promote_mode (type, mode, &unsignedp, 0);
6024 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6025 SUBREG_PROMOTED_VAR_P (temp) = 1;
6026 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6027 return temp;
6030 return SAVE_EXPR_RTL (exp);
6032 case UNSAVE_EXPR:
6034 rtx temp;
6035 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6036 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6037 return temp;
6040 case PLACEHOLDER_EXPR:
6042 tree placeholder_expr;
6044 /* If there is an object on the head of the placeholder list,
6045 see if some object in it is of type TYPE or a pointer to it. For
6046 further information, see tree.def. */
6047 for (placeholder_expr = placeholder_list;
6048 placeholder_expr != 0;
6049 placeholder_expr = TREE_CHAIN (placeholder_expr))
6051 tree need_type = TYPE_MAIN_VARIANT (type);
6052 tree object = 0;
6053 tree old_list = placeholder_list;
6054 tree elt;
6056 /* Find the outermost reference that is of the type we want.
6057 If none, see if any object has a type that is a pointer to
6058 the type we want. */
6059 for (elt = TREE_PURPOSE (placeholder_expr);
6060 elt != 0 && object == 0;
6062 = ((TREE_CODE (elt) == COMPOUND_EXPR
6063 || TREE_CODE (elt) == COND_EXPR)
6064 ? TREE_OPERAND (elt, 1)
6065 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6066 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6067 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6068 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6069 ? TREE_OPERAND (elt, 0) : 0))
6070 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6071 object = elt;
6073 for (elt = TREE_PURPOSE (placeholder_expr);
6074 elt != 0 && object == 0;
6076 = ((TREE_CODE (elt) == COMPOUND_EXPR
6077 || TREE_CODE (elt) == COND_EXPR)
6078 ? TREE_OPERAND (elt, 1)
6079 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6080 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6081 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6082 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6083 ? TREE_OPERAND (elt, 0) : 0))
6084 if (POINTER_TYPE_P (TREE_TYPE (elt))
6085 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6086 == need_type))
6087 object = build1 (INDIRECT_REF, need_type, elt);
6089 if (object != 0)
6091 /* Expand this object skipping the list entries before
6092 it was found in case it is also a PLACEHOLDER_EXPR.
6093 In that case, we want to translate it using subsequent
6094 entries. */
6095 placeholder_list = TREE_CHAIN (placeholder_expr);
6096 temp = expand_expr (object, original_target, tmode,
6097 ro_modifier);
6098 placeholder_list = old_list;
6099 return temp;
6104 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6105 abort ();
6107 case WITH_RECORD_EXPR:
6108 /* Put the object on the placeholder list, expand our first operand,
6109 and pop the list. */
6110 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6111 placeholder_list);
6112 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6113 tmode, ro_modifier);
6114 placeholder_list = TREE_CHAIN (placeholder_list);
6115 return target;
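/* For example (a sketch with made-up field and variable names): for a
   variable-sized record field as in Ada, a size expression of the form

	WITH_RECORD_EXPR <COMPONENT_REF <PLACEHOLDER_EXPR, len_field>, rec>

   is expanded by pushing REC onto PLACEHOLDER_LIST, expanding the first
   operand (during which the PLACEHOLDER_EXPR case above substitutes REC,
   or a pointer to it, for the placeholder), and popping the list again.  */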
6117 case GOTO_EXPR:
6118 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6119 expand_goto (TREE_OPERAND (exp, 0));
6120 else
6121 expand_computed_goto (TREE_OPERAND (exp, 0));
6122 return const0_rtx;
6124 case EXIT_EXPR:
6125 expand_exit_loop_if_false (NULL_PTR,
6126 invert_truthvalue (TREE_OPERAND (exp, 0)));
6127 return const0_rtx;
6129 case LABELED_BLOCK_EXPR:
6130 if (LABELED_BLOCK_BODY (exp))
6131 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6132 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6133 return const0_rtx;
6135 case EXIT_BLOCK_EXPR:
6136 if (EXIT_BLOCK_RETURN (exp))
6137 sorry ("returned value in block_exit_expr");
6138 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6139 return const0_rtx;
6141 case LOOP_EXPR:
6142 push_temp_slots ();
6143 expand_start_loop (1);
6144 expand_expr_stmt (TREE_OPERAND (exp, 0));
6145 expand_end_loop ();
6146 pop_temp_slots ();
6148 return const0_rtx;
6150 case BIND_EXPR:
6152 tree vars = TREE_OPERAND (exp, 0);
6153 int vars_need_expansion = 0;
6155 /* Need to open a binding contour here because
6156 if there are any cleanups they must be contained here. */
6157 expand_start_bindings (2);
6159 /* Mark the corresponding BLOCK for output in its proper place. */
6160 if (TREE_OPERAND (exp, 2) != 0
6161 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6162 insert_block (TREE_OPERAND (exp, 2));
6164 /* If VARS have not yet been expanded, expand them now. */
6165 while (vars)
6167 if (DECL_RTL (vars) == 0)
6169 vars_need_expansion = 1;
6170 expand_decl (vars);
6172 expand_decl_init (vars);
6173 vars = TREE_CHAIN (vars);
6176 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6178 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6180 return temp;
6183 case RTL_EXPR:
6184 if (RTL_EXPR_SEQUENCE (exp))
6186 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6187 abort ();
6188 emit_insns (RTL_EXPR_SEQUENCE (exp));
6189 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6191 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6192 free_temps_for_rtl_expr (exp);
6193 return RTL_EXPR_RTL (exp);
6195 case CONSTRUCTOR:
6196 /* If we don't need the result, just ensure we evaluate any
6197 subexpressions. */
6198 if (ignore)
6200 tree elt;
6201 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6202 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6203 EXPAND_MEMORY_USE_BAD);
6204 return const0_rtx;
6207 /* All elts simple constants => refer to a constant in memory. But
6208 if this is a non-BLKmode mode, let it store a field at a time
6209 since that should make a CONST_INT or CONST_DOUBLE when we
6210 fold. Likewise, if we have a target we can use, it is best to
6211 store directly into the target unless the type is large enough
6212 that memcpy will be used. If we are making an initializer and
6213 all operands are constant, put it in memory as well. */
6214 else if ((TREE_STATIC (exp)
6215 && ((mode == BLKmode
6216 && ! (target != 0 && safe_from_p (target, exp, 1)))
6217 || TREE_ADDRESSABLE (exp)
6218 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6219 && (!MOVE_BY_PIECES_P
6220 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6221 TYPE_ALIGN (type) / BITS_PER_UNIT))
6222 && ! mostly_zeros_p (exp))))
6223 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6225 rtx constructor = output_constant_def (exp);
6226 if (modifier != EXPAND_CONST_ADDRESS
6227 && modifier != EXPAND_INITIALIZER
6228 && modifier != EXPAND_SUM
6229 && (! memory_address_p (GET_MODE (constructor),
6230 XEXP (constructor, 0))
6231 || (flag_force_addr
6232 && GET_CODE (XEXP (constructor, 0)) != REG)))
6233 constructor = change_address (constructor, VOIDmode,
6234 XEXP (constructor, 0));
6235 return constructor;
6238 else
6240 /* Handle calls that pass values in multiple non-contiguous
6241 locations. The Irix 6 ABI has examples of this. */
6242 if (target == 0 || ! safe_from_p (target, exp, 1)
6243 || GET_CODE (target) == PARALLEL)
6245 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6246 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6247 else
6248 target = assign_temp (type, 0, 1, 1);
6251 if (TREE_READONLY (exp))
6253 if (GET_CODE (target) == MEM)
6254 target = copy_rtx (target);
6256 RTX_UNCHANGING_P (target) = 1;
6259 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0);
6260 return target;
6263 case INDIRECT_REF:
6265 tree exp1 = TREE_OPERAND (exp, 0);
6266 tree exp2;
6267 tree index;
6268 tree string = string_constant (exp1, &index);
6269 int i;
6271 /* Try to optimize reads from const strings. */
6272 if (string
6273 && TREE_CODE (string) == STRING_CST
6274 && TREE_CODE (index) == INTEGER_CST
6275 && !TREE_INT_CST_HIGH (index)
6276 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6277 && GET_MODE_CLASS (mode) == MODE_INT
6278 && GET_MODE_SIZE (mode) == 1
6279 && modifier != EXPAND_MEMORY_USE_WO)
6280 return GEN_INT (TREE_STRING_POINTER (string)[i]);
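/* For example, with the test above a one-byte read through a pointer into
   a string constant, such as *("abc" + 1), is folded directly to the
   character constant 'b' rather than emitting a load from the string's
   storage.  */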
6282 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6283 op0 = memory_address (mode, op0);
6285 if (current_function && current_function_check_memory_usage
6286 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6288 enum memory_use_mode memory_usage;
6289 memory_usage = get_memory_usage_from_modifier (modifier);
6291 if (memory_usage != MEMORY_USE_DONT)
6293 in_check_memory_usage = 1;
6294 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6295 op0, Pmode,
6296 GEN_INT (int_size_in_bytes (type)),
6297 TYPE_MODE (sizetype),
6298 GEN_INT (memory_usage),
6299 TYPE_MODE (integer_type_node));
6300 in_check_memory_usage = 0;
6304 temp = gen_rtx_MEM (mode, op0);
6305 /* If address was computed by addition,
6306 mark this as an element of an aggregate. */
6307 if (TREE_CODE (exp1) == PLUS_EXPR
6308 || (TREE_CODE (exp1) == SAVE_EXPR
6309 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
6310 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6311 || (TREE_CODE (exp1) == ADDR_EXPR
6312 && (exp2 = TREE_OPERAND (exp1, 0))
6313 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
6314 MEM_SET_IN_STRUCT_P (temp, 1);
6316 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
6317 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6319 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6320 here, because, in C and C++, the fact that a location is accessed
6321 through a pointer to const does not mean that the value there can
6322 never change. Languages where it can never change should
6323 also set TREE_STATIC. */
6324 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6325 return temp;
6328 case ARRAY_REF:
6329 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6330 abort ();
6333 tree array = TREE_OPERAND (exp, 0);
6334 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6335 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6336 tree index = TREE_OPERAND (exp, 1);
6337 tree index_type = TREE_TYPE (index);
6338 HOST_WIDE_INT i;
6340 /* Optimize the special-case of a zero lower bound.
6342 We convert the low_bound to sizetype to avoid some problems
6343 with constant folding. (E.g. suppose the lower bound is 1,
6344 and its mode is QI. Without the conversion, (ARRAY
6345 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6346 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6348 But sizetype isn't quite right either (especially if
6349 the lowbound is negative). FIXME */
6351 if (! integer_zerop (low_bound))
6352 index = fold (build (MINUS_EXPR, index_type, index,
6353 convert (sizetype, low_bound)));
6355 /* Fold an expression like: "foo"[2].
6356 This is not done in fold so it won't happen inside &.
6357 Don't fold if this is for wide characters since it's too
6358 difficult to do correctly and this is a very rare case. */
6360 if (TREE_CODE (array) == STRING_CST
6361 && TREE_CODE (index) == INTEGER_CST
6362 && !TREE_INT_CST_HIGH (index)
6363 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6364 && GET_MODE_CLASS (mode) == MODE_INT
6365 && GET_MODE_SIZE (mode) == 1)
6366 return GEN_INT (TREE_STRING_POINTER (array)[i]);
6368 /* If this is a constant index into a constant array,
6369 just get the value from the array. Handle both the cases when
6370 we have an explicit constructor and when our operand is a variable
6371 that was declared const. */
6373 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6375 if (TREE_CODE (index) == INTEGER_CST
6376 && TREE_INT_CST_HIGH (index) == 0)
6378 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6380 i = TREE_INT_CST_LOW (index);
6381 while (elem && i--)
6382 elem = TREE_CHAIN (elem);
6383 if (elem)
6384 return expand_expr (fold (TREE_VALUE (elem)), target,
6385 tmode, ro_modifier);
6389 else if (optimize >= 1
6390 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6391 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6392 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6394 if (TREE_CODE (index) == INTEGER_CST)
6396 tree init = DECL_INITIAL (array);
6398 i = TREE_INT_CST_LOW (index);
6399 if (TREE_CODE (init) == CONSTRUCTOR)
6401 tree elem = CONSTRUCTOR_ELTS (init);
6403 while (elem
6404 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6405 elem = TREE_CHAIN (elem);
6406 if (elem)
6407 return expand_expr (fold (TREE_VALUE (elem)), target,
6408 tmode, ro_modifier);
6410 else if (TREE_CODE (init) == STRING_CST
6411 && TREE_INT_CST_HIGH (index) == 0
6412 && (TREE_INT_CST_LOW (index)
6413 < TREE_STRING_LENGTH (init)))
6414 return (GEN_INT
6415 (TREE_STRING_POINTER
6416 (init)[TREE_INT_CST_LOW (index)]));
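/* For example (illustrative only): given `static const int t[3] = { 1, 2, 3 };'
   in C, the test above is meant to let an access such as t[2], when compiling
   with optimization, be taken straight from the recorded initializer as the
   constant 3 instead of loading it from memory.  */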
6421 /* ... fall through ... */
6423 case COMPONENT_REF:
6424 case BIT_FIELD_REF:
6425 /* If the operand is a CONSTRUCTOR, we can just extract the
6426 appropriate field if it is present. Don't do this if we have
6427 already written the data since we want to refer to that copy
6428 and varasm.c assumes that's what we'll do. */
6429 if (code != ARRAY_REF
6430 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6431 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6433 tree elt;
6435 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6436 elt = TREE_CHAIN (elt))
6437 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6438 /* We can normally use the value of the field in the
6439 CONSTRUCTOR. However, if this is a bitfield in
6440 an integral mode that we can fit in a HOST_WIDE_INT,
6441 we must mask only the number of bits in the bitfield,
6442 since this is done implicitly by the constructor. If
6443 the bitfield does not meet either of those conditions,
6444 we can't do this optimization. */
6445 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6446 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6447 == MODE_INT)
6448 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6449 <= HOST_BITS_PER_WIDE_INT))))
6451 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6452 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6454 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6456 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6458 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6459 op0 = expand_and (op0, op1, target);
6461 else
6463 enum machine_mode imode
6464 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6465 tree count
6466 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6469 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6470 target, 0);
6471 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6472 target, 0);
6476 return op0;
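/* For example: for an unsigned 3-bit bit-field whose CONSTRUCTOR value was
   just expanded into OP0, the code above masks it with (1 << 3) - 1 = 7;
   for a signed 3-bit field held in SImode it instead shifts left by
   32 - 3 = 29 and arithmetically back right by 29, so the field's sign bit
   is propagated into the upper bits.  */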
6481 enum machine_mode mode1;
6482 int bitsize;
6483 int bitpos;
6484 tree offset;
6485 int volatilep = 0;
6486 int alignment;
6487 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6488 &mode1, &unsignedp, &volatilep,
6489 &alignment);
6491 /* If we got back the original object, something is wrong. Perhaps
6492 we are evaluating an expression too early. In any event, don't
6493 infinitely recurse. */
6494 if (tem == exp)
6495 abort ();
6497 /* If TEM's type is a union of variable size, pass TARGET to the inner
6498 computation, since it will need a temporary and TARGET is known
6499 to be usable for that. This occurs in unchecked conversion in Ada. */
6501 op0 = expand_expr (tem,
6502 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6503 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6504 != INTEGER_CST)
6505 ? target : NULL_RTX),
6506 VOIDmode,
6507 modifier == EXPAND_INITIALIZER
6508 ? modifier : EXPAND_NORMAL);
6510 /* If this is a constant, put it into a register if it is a
6511 legitimate constant and memory if it isn't. */
6512 if (CONSTANT_P (op0))
6514 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6515 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6516 op0 = force_reg (mode, op0);
6517 else
6518 op0 = validize_mem (force_const_mem (mode, op0));
6521 if (offset != 0)
6523 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6525 if (GET_CODE (op0) != MEM)
6526 abort ();
6528 if (GET_MODE (offset_rtx) != ptr_mode)
6530 #ifdef POINTERS_EXTEND_UNSIGNED
6531 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6532 #else
6533 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6534 #endif
6537 /* A constant address in OP0 can have VOIDmode; we must not try
6538 to call force_reg in that case, so avoid it. */
6539 if (GET_CODE (op0) == MEM
6540 && GET_MODE (op0) == BLKmode
6541 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6542 && bitsize
6543 && (bitpos % bitsize) == 0
6544 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6545 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6547 rtx temp = change_address (op0, mode1,
6548 plus_constant (XEXP (op0, 0),
6549 (bitpos /
6550 BITS_PER_UNIT)));
6551 if (GET_CODE (XEXP (temp, 0)) == REG)
6552 op0 = temp;
6553 else
6554 op0 = change_address (op0, mode1,
6555 force_reg (GET_MODE (XEXP (temp, 0)),
6556 XEXP (temp, 0)));
6557 bitpos = 0;
6561 op0 = change_address (op0, VOIDmode,
6562 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6563 force_reg (ptr_mode,
6564 offset_rtx)));
6567 /* Don't forget about volatility even if this is a bitfield. */
6568 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6570 op0 = copy_rtx (op0);
6571 MEM_VOLATILE_P (op0) = 1;
6574 /* Check the access. */
6575 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6577 enum memory_use_mode memory_usage;
6578 memory_usage = get_memory_usage_from_modifier (modifier);
6580 if (memory_usage != MEMORY_USE_DONT)
6582 rtx to;
6583 int size;
6585 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6586 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6588 /* Check the access right of the pointer. */
6589 if (size > BITS_PER_UNIT)
6590 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6591 to, Pmode,
6592 GEN_INT (size / BITS_PER_UNIT),
6593 TYPE_MODE (sizetype),
6594 GEN_INT (memory_usage),
6595 TYPE_MODE (integer_type_node));
6599 /* In cases where an aligned union has an unaligned object
6600 as a field, we might be extracting a BLKmode value from
6601 an integer-mode (e.g., SImode) object. Handle this case
6602 by doing the extract into an object as wide as the field
6603 (which we know to be the width of a basic mode), then
6604 storing into memory, and changing the mode to BLKmode.
6605 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6606 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6607 if (mode1 == VOIDmode
6608 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6609 || (modifier != EXPAND_CONST_ADDRESS
6610 && modifier != EXPAND_INITIALIZER
6611 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6612 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6613 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6614 /* If the field isn't aligned enough to fetch as a memref,
6615 fetch it as a bit field. */
6616 || (SLOW_UNALIGNED_ACCESS
6617 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6618 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6620 enum machine_mode ext_mode = mode;
6622 if (ext_mode == BLKmode)
6623 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6625 if (ext_mode == BLKmode)
6627 /* In this case, BITPOS must start at a byte boundary and
6628 TARGET, if specified, must be a MEM. */
6629 if (GET_CODE (op0) != MEM
6630 || (target != 0 && GET_CODE (target) != MEM)
6631 || bitpos % BITS_PER_UNIT != 0)
6632 abort ();
6634 op0 = change_address (op0, VOIDmode,
6635 plus_constant (XEXP (op0, 0),
6636 bitpos / BITS_PER_UNIT));
6637 if (target == 0)
6638 target = assign_temp (type, 0, 1, 1);
6640 emit_block_move (target, op0,
6641 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6642 / BITS_PER_UNIT),
6645 return target;
6648 op0 = validize_mem (op0);
6650 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6651 mark_reg_pointer (XEXP (op0, 0), alignment);
6653 op0 = extract_bit_field (op0, bitsize, bitpos,
6654 unsignedp, target, ext_mode, ext_mode,
6655 alignment,
6656 int_size_in_bytes (TREE_TYPE (tem)));
6658 /* If the result is a record type and BITSIZE is narrower than
6659 the mode of OP0, an integral mode, and this is a big endian
6660 machine, we must put the field into the high-order bits. */
6661 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6662 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6663 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6664 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6665 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6666 - bitsize),
6667 op0, 1);
6669 if (mode == BLKmode)
6671 rtx new = assign_stack_temp (ext_mode,
6672 bitsize / BITS_PER_UNIT, 0);
6674 emit_move_insn (new, op0);
6675 op0 = copy_rtx (new);
6676 PUT_MODE (op0, BLKmode);
6677 MEM_SET_IN_STRUCT_P (op0, 1);
6680 return op0;
6683 /* If the result is BLKmode, use that to access the object
6684 now as well. */
6685 if (mode == BLKmode)
6686 mode1 = BLKmode;
6688 /* Get a reference to just this component. */
6689 if (modifier == EXPAND_CONST_ADDRESS
6690 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6691 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6692 (bitpos / BITS_PER_UNIT)));
6693 else
6694 op0 = change_address (op0, mode1,
6695 plus_constant (XEXP (op0, 0),
6696 (bitpos / BITS_PER_UNIT)));
6698 if (GET_CODE (op0) == MEM)
6699 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6701 if (GET_CODE (XEXP (op0, 0)) == REG)
6702 mark_reg_pointer (XEXP (op0, 0), alignment);
6704 MEM_SET_IN_STRUCT_P (op0, 1);
6705 MEM_VOLATILE_P (op0) |= volatilep;
6706 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6707 || modifier == EXPAND_CONST_ADDRESS
6708 || modifier == EXPAND_INITIALIZER)
6709 return op0;
6710 else if (target == 0)
6711 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6713 convert_move (target, op0, unsignedp);
6714 return target;
6717 /* Intended for a reference to a buffer of a file-object in Pascal.
6718 But it's not certain that a special tree code will really be
6719 necessary for these. INDIRECT_REF might work for them. */
6720 case BUFFER_REF:
6721 abort ();
6723 case IN_EXPR:
6725 /* Pascal set IN expression.
6727 Algorithm:
6728 rlo = set_low - (set_low%bits_per_word);
6729 the_word = set [ (index - rlo)/bits_per_word ];
6730 bit_index = index % bits_per_word;
6731 bitmask = 1 << bit_index;
6732 return !!(the_word & bitmask); */
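/* A standalone C sketch of the algorithm above, for illustration only;
   BITS_PER_WORD_EXAMPLE, SET and SET_LOW are made-up names, and the real
   expansion below builds the same computation in RTL with expand_binop,
   expand_divmod and expand_shift:

	#define BITS_PER_WORD_EXAMPLE 32

	static int
	in_set_example (const unsigned int *set, int set_low, int index)
	{
	  int rlo = set_low - (set_low % BITS_PER_WORD_EXAMPLE);
	  unsigned int the_word = set[(index - rlo) / BITS_PER_WORD_EXAMPLE];
	  int bit_index = index % BITS_PER_WORD_EXAMPLE;
	  unsigned int bitmask = 1U << bit_index;
	  return !!(the_word & bitmask);
	}

   (Range checking against the set bounds, which the RTL below also emits,
   is omitted from this sketch.)  */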
6734 tree set = TREE_OPERAND (exp, 0);
6735 tree index = TREE_OPERAND (exp, 1);
6736 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6737 tree set_type = TREE_TYPE (set);
6738 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6739 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6740 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6741 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6742 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6743 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6744 rtx setaddr = XEXP (setval, 0);
6745 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6746 rtx rlow;
6747 rtx diff, quo, rem, addr, bit, result;
6749 preexpand_calls (exp);
6751 /* If domain is empty, answer is no. Likewise if index is constant
6752 and out of bounds. */
6753 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6754 && TREE_CODE (set_low_bound) == INTEGER_CST
6755 && tree_int_cst_lt (set_high_bound, set_low_bound))
6756 || (TREE_CODE (index) == INTEGER_CST
6757 && TREE_CODE (set_low_bound) == INTEGER_CST
6758 && tree_int_cst_lt (index, set_low_bound))
6759 || (TREE_CODE (set_high_bound) == INTEGER_CST
6760 && TREE_CODE (index) == INTEGER_CST
6761 && tree_int_cst_lt (set_high_bound, index))))
6762 return const0_rtx;
6764 if (target == 0)
6765 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6767 /* If we get here, we have to generate the code for both cases
6768 (in range and out of range). */
6770 op0 = gen_label_rtx ();
6771 op1 = gen_label_rtx ();
6773 if (! (GET_CODE (index_val) == CONST_INT
6774 && GET_CODE (lo_r) == CONST_INT))
6776 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6777 GET_MODE (index_val), iunsignedp, 0, op1);
6780 if (! (GET_CODE (index_val) == CONST_INT
6781 && GET_CODE (hi_r) == CONST_INT))
6783 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6784 GET_MODE (index_val), iunsignedp, 0, op1);
6787 /* Calculate the element number of bit zero in the first word
6788 of the set. */
6789 if (GET_CODE (lo_r) == CONST_INT)
6790 rlow = GEN_INT (INTVAL (lo_r)
6791 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6792 else
6793 rlow = expand_binop (index_mode, and_optab, lo_r,
6794 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6795 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6797 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6798 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6800 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6801 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6802 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6803 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6805 addr = memory_address (byte_mode,
6806 expand_binop (index_mode, add_optab, diff,
6807 setaddr, NULL_RTX, iunsignedp,
6808 OPTAB_LIB_WIDEN));
6810 /* Extract the bit we want to examine. */
6811 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6812 gen_rtx_MEM (byte_mode, addr),
6813 make_tree (TREE_TYPE (index), rem),
6814 NULL_RTX, 1);
6815 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6816 GET_MODE (target) == byte_mode ? target : 0,
6817 1, OPTAB_LIB_WIDEN);
6819 if (result != target)
6820 convert_move (target, result, 1);
6822 /* Output the code to handle the out-of-range case. */
6823 emit_jump (op0);
6824 emit_label (op1);
6825 emit_move_insn (target, const0_rtx);
6826 emit_label (op0);
6827 return target;
6830 case WITH_CLEANUP_EXPR:
6831 if (RTL_EXPR_RTL (exp) == 0)
6833 RTL_EXPR_RTL (exp)
6834 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6835 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6837 /* That's it for this cleanup. */
6838 TREE_OPERAND (exp, 2) = 0;
6840 return RTL_EXPR_RTL (exp);
6842 case CLEANUP_POINT_EXPR:
6844 /* Start a new binding layer that will keep track of all cleanup
6845 actions to be performed. */
6846 expand_start_bindings (2);
6848 target_temp_slot_level = temp_slot_level;
6850 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6851 /* If we're going to use this value, load it up now. */
6852 if (! ignore)
6853 op0 = force_not_mem (op0);
6854 preserve_temp_slots (op0);
6855 expand_end_bindings (NULL_TREE, 0, 0);
6857 return op0;
6859 case CALL_EXPR:
6860 /* Check for a built-in function. */
6861 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6862 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6863 == FUNCTION_DECL)
6864 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6865 return expand_builtin (exp, target, subtarget, tmode, ignore);
6867 /* If this call was expanded already by preexpand_calls,
6868 just return the result we got. */
6869 if (CALL_EXPR_RTL (exp) != 0)
6870 return CALL_EXPR_RTL (exp);
6872 return expand_call (exp, target, ignore);
6874 case NON_LVALUE_EXPR:
6875 case NOP_EXPR:
6876 case CONVERT_EXPR:
6877 case REFERENCE_EXPR:
6878 if (TREE_CODE (type) == UNION_TYPE)
6880 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6881 if (target == 0)
6883 if (mode != BLKmode)
6884 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6885 else
6886 target = assign_temp (type, 0, 1, 1);
6889 if (GET_CODE (target) == MEM)
6890 /* Store data into beginning of memory target. */
6891 store_expr (TREE_OPERAND (exp, 0),
6892 change_address (target, TYPE_MODE (valtype), 0), 0);
6894 else if (GET_CODE (target) == REG)
6895 /* Store this field into a union of the proper type. */
6896 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6897 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6898 VOIDmode, 0, 1,
6899 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6901 else
6902 abort ();
6904 /* Return the entire union. */
6905 return target;
6908 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6910 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6911 ro_modifier);
6913 /* If the signedness of the conversion differs and OP0 is
6914 a promoted SUBREG, clear that indication since we now
6915 have to do the proper extension. */
6916 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6917 && GET_CODE (op0) == SUBREG)
6918 SUBREG_PROMOTED_VAR_P (op0) = 0;
6920 return op0;
6923 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6924 if (GET_MODE (op0) == mode)
6925 return op0;
6927 /* If OP0 is a constant, just convert it into the proper mode. */
6928 if (CONSTANT_P (op0))
6929 return
6930 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6931 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6933 if (modifier == EXPAND_INITIALIZER)
6934 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6936 if (target == 0)
6937 return
6938 convert_to_mode (mode, op0,
6939 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6940 else
6941 convert_move (target, op0,
6942 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6943 return target;
6945 case PLUS_EXPR:
6946 /* We come here from MINUS_EXPR when the second operand is a
6947 constant. */
6948 plus_expr:
6949 this_optab = add_optab;
6951 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6952 something else, make sure we add the register to the constant and
6953 then to the other thing. This case can occur during strength
6954 reduction and doing it this way will produce better code if the
6955 frame pointer or argument pointer is eliminated.
6957 fold-const.c will ensure that the constant is always in the inner
6958 PLUS_EXPR, so the only case we need to do anything about is if
6959 sp, ap, or fp is our second argument, in which case we must swap
6960 the innermost first argument and our second argument. */
6962 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6963 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6964 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6965 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6966 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6967 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6969 tree t = TREE_OPERAND (exp, 1);
6971 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6972 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6975 /* If the result is to be ptr_mode and we are adding an integer to
6976 something, we might be forming a constant. So try to use
6977 plus_constant. If it produces a sum and we can't accept it,
6978 use force_operand. This allows P = &ARR[const] to generate
6979 efficient code on machines where a SYMBOL_REF is not a valid
6980 address.
6982 If this is an EXPAND_SUM call, always return the sum. */
6983 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6984 || mode == ptr_mode)
6986 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6987 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6988 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6990 rtx constant_part;
6992 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6993 EXPAND_SUM);
6994 /* Use immed_double_const to ensure that the constant is
6995 truncated according to the mode of OP1, then sign extended
6996 to a HOST_WIDE_INT. Using the constant directly can result
6997 in non-canonical RTL in a 64x32 cross compile. */
6998 constant_part
6999 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7000 (HOST_WIDE_INT) 0,
7001 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7002 op1 = plus_constant (op1, INTVAL (constant_part));
7003 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7004 op1 = force_operand (op1, target);
7005 return op1;
7008 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7009 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7010 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7012 rtx constant_part;
7014 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7015 EXPAND_SUM);
7016 if (! CONSTANT_P (op0))
7018 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7019 VOIDmode, modifier);
7020 /* Don't go to both_summands if modifier
7021 says it's not right to return a PLUS. */
7022 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7023 goto binop2;
7024 goto both_summands;
7026 /* Use immed_double_const to ensure that the constant is
7027 truncated according to the mode of OP1, then sign extended
7028 to a HOST_WIDE_INT. Using the constant directly can result
7029 in non-canonical RTL in a 64x32 cross compile. */
7030 constant_part
7031 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7032 (HOST_WIDE_INT) 0,
7033 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7034 op0 = plus_constant (op0, INTVAL (constant_part));
7035 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7036 op0 = force_operand (op0, target);
7037 return op0;
7041 /* No sense saving up arithmetic to be done
7042 if it's all in the wrong mode to form part of an address.
7043 And force_operand won't know whether to sign-extend or
7044 zero-extend. */
7045 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7046 || mode != ptr_mode)
7047 goto binop;
7049 preexpand_calls (exp);
7050 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7051 subtarget = 0;
7053 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7054 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7056 both_summands:
7057 /* Make sure any term that's a sum with a constant comes last. */
7058 if (GET_CODE (op0) == PLUS
7059 && CONSTANT_P (XEXP (op0, 1)))
7061 temp = op0;
7062 op0 = op1;
7063 op1 = temp;
7065 /* If adding to a sum including a constant,
7066 associate it to put the constant outside. */
7067 if (GET_CODE (op1) == PLUS
7068 && CONSTANT_P (XEXP (op1, 1)))
7070 rtx constant_term = const0_rtx;
7072 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7073 if (temp != 0)
7074 op0 = temp;
7075 /* Ensure that MULT comes first if there is one. */
7076 else if (GET_CODE (op0) == MULT)
7077 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7078 else
7079 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7081 /* Let's also eliminate constants from op0 if possible. */
7082 op0 = eliminate_constant_term (op0, &constant_term);
7084 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7085 their sum should be a constant. Form it into OP1, since the
7086 result we want will then be OP0 + OP1. */
7088 temp = simplify_binary_operation (PLUS, mode, constant_term,
7089 XEXP (op1, 1));
7090 if (temp != 0)
7091 op1 = temp;
7092 else
7093 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7096 /* Put a constant term last and put a multiplication first. */
7097 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7098 temp = op1, op1 = op0, op0 = temp;
7100 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7101 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
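/* For example (illustrative only): under EXPAND_SUM, expanding the address
   computation for `arr[i]' with 4-byte elements can yield an address-shaped
   sum such as

	(plus:SI (mult:SI (reg:SI i) (const_int 4)) (symbol_ref:SI "arr"))

   The canonicalization above keeps any MULT first and folds constant terms
   together, so the result can be used directly as a memory address.  */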
7103 case MINUS_EXPR:
7104 /* For initializers, we are allowed to return a MINUS of two
7105 symbolic constants. Here we handle all cases when both operands
7106 are constant. */
7107 /* Handle difference of two symbolic constants,
7108 for the sake of an initializer. */
7109 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7110 && really_constant_p (TREE_OPERAND (exp, 0))
7111 && really_constant_p (TREE_OPERAND (exp, 1)))
7113 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7114 VOIDmode, ro_modifier);
7115 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7116 VOIDmode, ro_modifier);
7118 /* If the last operand is a CONST_INT, use plus_constant of
7119 the negated constant. Else make the MINUS. */
7120 if (GET_CODE (op1) == CONST_INT)
7121 return plus_constant (op0, - INTVAL (op1));
7122 else
7123 return gen_rtx_MINUS (mode, op0, op1);
7125 /* Convert A - const to A + (-const). */
7126 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7128 tree negated = fold (build1 (NEGATE_EXPR, type,
7129 TREE_OPERAND (exp, 1)));
7131 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7132 /* If we can't negate the constant in TYPE, leave it alone and
7133 expand_binop will negate it for us. We used to try to do it
7134 here in the signed version of TYPE, but that doesn't work
7135 on POINTER_TYPEs. */;
7136 else
7138 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7139 goto plus_expr;
7142 this_optab = sub_optab;
7143 goto binop;
7145 case MULT_EXPR:
7146 preexpand_calls (exp);
7147 /* If first operand is constant, swap them.
7148 Thus the following special case checks need only
7149 check the second operand. */
7150 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7152 register tree t1 = TREE_OPERAND (exp, 0);
7153 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7154 TREE_OPERAND (exp, 1) = t1;
7157 /* Attempt to return something suitable for generating an
7158 indexed address, for machines that support that. */
7160 if (modifier == EXPAND_SUM && mode == ptr_mode
7161 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7162 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7164 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7165 EXPAND_SUM);
7167 /* Apply distributive law if OP0 is x+c. */
7168 if (GET_CODE (op0) == PLUS
7169 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7170 return
7171 gen_rtx_PLUS
7172 (mode,
7173 gen_rtx_MULT
7174 (mode, XEXP (op0, 0),
7175 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7176 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7177 * INTVAL (XEXP (op0, 1))));
7179 if (GET_CODE (op0) != REG)
7180 op0 = force_operand (op0, NULL_RTX);
7181 if (GET_CODE (op0) != REG)
7182 op0 = copy_to_mode_reg (mode, op0);
7184 return
7185 gen_rtx_MULT (mode, op0,
7186 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7189 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7190 subtarget = 0;
7192 /* Check for multiplying things that have been extended
7193 from a narrower type. If this machine supports multiplying
7194 in that narrower type with a result in the desired type,
7195 do it that way, and avoid the explicit type-conversion. */
7196 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7197 && TREE_CODE (type) == INTEGER_TYPE
7198 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7199 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7200 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7201 && int_fits_type_p (TREE_OPERAND (exp, 1),
7202 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7203 /* Don't use a widening multiply if a shift will do. */
7204 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7205 > HOST_BITS_PER_WIDE_INT)
7206 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7208 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7209 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7211 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7212 /* If both operands are extended, they must either both
7213 be zero-extended or both be sign-extended. */
7214 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7216 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7218 enum machine_mode innermode
7219 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7220 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7221 ? smul_widen_optab : umul_widen_optab);
7222 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7223 ? umul_widen_optab : smul_widen_optab);
7224 if (mode == GET_MODE_WIDER_MODE (innermode))
7226 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7228 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7229 NULL_RTX, VOIDmode, 0);
7230 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7231 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7232 VOIDmode, 0);
7233 else
7234 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7235 NULL_RTX, VOIDmode, 0);
7236 goto binop2;
7238 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7239 && innermode == word_mode)
7241 rtx htem;
7242 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7243 NULL_RTX, VOIDmode, 0);
7244 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7245 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7246 VOIDmode, 0);
7247 else
7248 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7249 NULL_RTX, VOIDmode, 0);
7250 temp = expand_binop (mode, other_optab, op0, op1, target,
7251 unsignedp, OPTAB_LIB_WIDEN);
7252 htem = expand_mult_highpart_adjust (innermode,
7253 gen_highpart (innermode, temp),
7254 op0, op1,
7255 gen_highpart (innermode, temp),
7256 unsignedp);
7257 emit_move_insn (gen_highpart (innermode, temp), htem);
7258 return temp;
7262 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7263 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7264 return expand_mult (mode, op0, op1, target, unsignedp);
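/* For example: for C source like `a * b' where A and B are `short' (so both
   operands arrive here as NOP_EXPR promotions to `int'), the test above can
   pick a HImode-to-SImode widening multiply (smul_widen_optab or
   umul_widen_optab) instead of extending both operands to SImode and doing a
   full SImode multiply; when only the pattern with the opposite signedness
   exists (and the narrow mode is word_mode), that pattern is used and
   expand_mult_highpart_adjust fixes up the high half.  */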
7266 case TRUNC_DIV_EXPR:
7267 case FLOOR_DIV_EXPR:
7268 case CEIL_DIV_EXPR:
7269 case ROUND_DIV_EXPR:
7270 case EXACT_DIV_EXPR:
7271 preexpand_calls (exp);
7272 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7273 subtarget = 0;
7274 /* Possible optimization: compute the dividend with EXPAND_SUM
7275 then, if the divisor is constant, we can optimize the case
7276 where some terms of the dividend have coefficients divisible by it. */
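/* E.g. a dividend of the form a * 8 + b with a constant divisor of 4
   has a term whose coefficient is divisible by the divisor; the shapes
   here are only illustrative, and the optimization is not done yet.  */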
7277 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7278 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7279 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7281 case RDIV_EXPR:
7282 this_optab = flodiv_optab;
7283 goto binop;
7285 case TRUNC_MOD_EXPR:
7286 case FLOOR_MOD_EXPR:
7287 case CEIL_MOD_EXPR:
7288 case ROUND_MOD_EXPR:
7289 preexpand_calls (exp);
7290 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7291 subtarget = 0;
7292 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7293 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7294 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7296 case FIX_ROUND_EXPR:
7297 case FIX_FLOOR_EXPR:
7298 case FIX_CEIL_EXPR:
7299 abort (); /* Not used for C. */
7301 case FIX_TRUNC_EXPR:
7302 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7303 if (target == 0)
7304 target = gen_reg_rtx (mode);
7305 expand_fix (target, op0, unsignedp);
7306 return target;
7308 case FLOAT_EXPR:
7309 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7310 if (target == 0)
7311 target = gen_reg_rtx (mode);
7312 /* expand_float can't figure out what to do if FROM has VOIDmode.
7313 So give it the correct mode. With -O, cse will optimize this. */
7314 if (GET_MODE (op0) == VOIDmode)
7315 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7316 op0);
7317 expand_float (target, op0,
7318 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7319 return target;
7321 case NEGATE_EXPR:
7322 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7323 temp = expand_unop (mode, neg_optab, op0, target, 0);
7324 if (temp == 0)
7325 abort ();
7326 return temp;
7328 case ABS_EXPR:
7329 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7331 /* Handle complex values specially. */
7332 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7333 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7334 return expand_complex_abs (mode, op0, target, unsignedp);
7336 /* Unsigned abs is simply the operand. Testing here means we don't
7337 risk generating incorrect code below. */
7338 if (TREE_UNSIGNED (type))
7339 return op0;
7341 return expand_abs (mode, op0, target,
7342 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7344 case MAX_EXPR:
7345 case MIN_EXPR:
7346 target = original_target;
7347 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7348 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7349 || GET_MODE (target) != mode
7350 || (GET_CODE (target) == REG
7351 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7352 target = gen_reg_rtx (mode);
7353 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7354 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7356 /* First try to do it with a special MIN or MAX instruction.
7357 If that does not win, use a conditional jump to select the proper
7358 value. */
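/* E.g. MAX_EXPR of a and b on a machine with no max instruction might
   come out roughly as: target = a; if (target >= b) goto lab;
   target = b; lab: -- which is what the jump code below produces
   (a, b and lab are illustrative names).  */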
7359 this_optab = (TREE_UNSIGNED (type)
7360 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7361 : (code == MIN_EXPR ? smin_optab : smax_optab));
7363 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7364 OPTAB_WIDEN);
7365 if (temp != 0)
7366 return temp;
7368 /* At this point, a MEM target is no longer useful; we will get better
7369 code without it. */
7371 if (GET_CODE (target) == MEM)
7372 target = gen_reg_rtx (mode);
7374 if (target != op0)
7375 emit_move_insn (target, op0);
7377 op0 = gen_label_rtx ();
7379 /* If this mode is an integer too wide to compare properly,
7380 compare word by word. Rely on cse to optimize constant cases. */
7381 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode, ccp_jump))
7383 if (code == MAX_EXPR)
7384 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7385 target, op1, NULL_RTX, op0);
7386 else
7387 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7388 op1, target, NULL_RTX, op0);
7390 else
7392 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7393 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7394 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7395 op0);
7397 emit_move_insn (target, op1);
7398 emit_label (op0);
7399 return target;
7401 case BIT_NOT_EXPR:
7402 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7403 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7404 if (temp == 0)
7405 abort ();
7406 return temp;
7408 case FFS_EXPR:
7409 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7410 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7411 if (temp == 0)
7412 abort ();
7413 return temp;
7415 /* ??? Can optimize bitwise operations with one arg constant.
7416 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7417 and (a bitwise1 b) bitwise2 b (etc)
7418 but that is probably not worth while. */
7420 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7421 boolean values when we want in all cases to compute both of them. In
7422 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7423 as actual zero-or-1 values and then bitwise anding. In cases where
7424 there cannot be any side effects, better code would be made by
7425 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7426 how to recognize those cases. */
7428 case TRUTH_AND_EXPR:
7429 case BIT_AND_EXPR:
7430 this_optab = and_optab;
7431 goto binop;
7433 case TRUTH_OR_EXPR:
7434 case BIT_IOR_EXPR:
7435 this_optab = ior_optab;
7436 goto binop;
7438 case TRUTH_XOR_EXPR:
7439 case BIT_XOR_EXPR:
7440 this_optab = xor_optab;
7441 goto binop;
7443 case LSHIFT_EXPR:
7444 case RSHIFT_EXPR:
7445 case LROTATE_EXPR:
7446 case RROTATE_EXPR:
7447 preexpand_calls (exp);
7448 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7449 subtarget = 0;
7450 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7451 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7452 unsignedp);
7454 /* Could determine the answer when only additive constants differ. Also,
7455 the addition of one can be handled by changing the condition. */
7456 case LT_EXPR:
7457 case LE_EXPR:
7458 case GT_EXPR:
7459 case GE_EXPR:
7460 case EQ_EXPR:
7461 case NE_EXPR:
7462 preexpand_calls (exp);
7463 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7464 if (temp != 0)
7465 return temp;
7467 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7468 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7469 && original_target
7470 && GET_CODE (original_target) == REG
7471 && (GET_MODE (original_target)
7472 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7474 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7475 VOIDmode, 0);
7477 if (temp != original_target)
7478 temp = copy_to_reg (temp);
7480 op1 = gen_label_rtx ();
7481 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7482 GET_MODE (temp), unsignedp, 0, op1);
7483 emit_move_insn (temp, const1_rtx);
7484 emit_label (op1);
7485 return temp;
7488 /* If no set-flag instruction, must generate a conditional
7489 store into a temporary variable. Drop through
7490 and handle this like && and ||. */
7492 case TRUTH_ANDIF_EXPR:
7493 case TRUTH_ORIF_EXPR:
7494 if (! ignore
7495 && (target == 0 || ! safe_from_p (target, exp, 1)
7496 /* Make sure we don't have a hard reg (such as function's return
7497 value) live across basic blocks, if not optimizing. */
7498 || (!optimize && GET_CODE (target) == REG
7499 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7500 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7502 if (target)
7503 emit_clr_insn (target);
7505 op1 = gen_label_rtx ();
7506 jumpifnot (exp, op1);
7508 if (target)
7509 emit_0_to_1_insn (target);
7511 emit_label (op1);
7512 return ignore ? const0_rtx : target;
7514 case TRUTH_NOT_EXPR:
7515 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7516 /* The parser is careful to generate TRUTH_NOT_EXPR
7517 only with operands that are always zero or one. */
7518 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7519 target, 1, OPTAB_LIB_WIDEN);
7520 if (temp == 0)
7521 abort ();
7522 return temp;
7524 case COMPOUND_EXPR:
7525 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7526 emit_queue ();
7527 return expand_expr (TREE_OPERAND (exp, 1),
7528 (ignore ? const0_rtx : target),
7529 VOIDmode, 0);
7531 case COND_EXPR:
7532 /* If we would have a "singleton" (see below) were it not for a
7533 conversion in each arm, bring that conversion back out. */
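/* E.g. x ? (long) (a + b) : (long) a, where both arms are NOP_EXPRs
   from the same inner type, is rewritten as (long) (x ? a + b : a) so
   the singleton code below can handle it (names are illustrative).  */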
7534 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7535 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7536 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7537 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7539 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7540 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7542 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7543 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7544 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7545 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7546 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7547 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7548 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7549 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7550 return expand_expr (build1 (NOP_EXPR, type,
7551 build (COND_EXPR, TREE_TYPE (true),
7552 TREE_OPERAND (exp, 0),
7553 true, false)),
7554 target, tmode, modifier);
7558 /* Note that COND_EXPRs whose type is a structure or union
7559 are required to be constructed to contain assignments of
7560 a temporary variable, so that we can evaluate them here
7561 for side effect only. If type is void, we must do likewise. */
7563 /* If an arm of the branch requires a cleanup,
7564 only that cleanup is performed. */
7566 tree singleton = 0;
7567 tree binary_op = 0, unary_op = 0;
7569 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7570 convert it to our mode, if necessary. */
7571 if (integer_onep (TREE_OPERAND (exp, 1))
7572 && integer_zerop (TREE_OPERAND (exp, 2))
7573 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7575 if (ignore)
7577 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7578 ro_modifier);
7579 return const0_rtx;
7582 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7583 if (GET_MODE (op0) == mode)
7584 return op0;
7586 if (target == 0)
7587 target = gen_reg_rtx (mode);
7588 convert_move (target, op0, unsignedp);
7589 return target;
7592 /* Check for X ? A + B : A. If we have this, we can copy A to the
7593 output and conditionally add B. Similarly for unary operations.
7594 Don't do this if X has side-effects because those side effects
7595 might affect A or B and the "?" operation is a sequence point in
7596 ANSI. (operand_equal_p tests for side effects.) */
7598 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7599 && operand_equal_p (TREE_OPERAND (exp, 2),
7600 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7601 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7602 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7603 && operand_equal_p (TREE_OPERAND (exp, 1),
7604 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7605 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7606 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7607 && operand_equal_p (TREE_OPERAND (exp, 2),
7608 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7609 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7610 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7611 && operand_equal_p (TREE_OPERAND (exp, 1),
7612 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7613 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7615 /* If we are not to produce a result, we have no target. Otherwise,
7616 if a target was specified use it; it will not be used as an
7617 intermediate target unless it is safe. If no target, use a
7618 temporary. */
7620 if (ignore)
7621 temp = 0;
7622 else if (original_target
7623 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7624 || (singleton && GET_CODE (original_target) == REG
7625 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7626 && original_target == var_rtx (singleton)))
7627 && GET_MODE (original_target) == mode
7628 #ifdef HAVE_conditional_move
7629 && (! can_conditionally_move_p (mode)
7630 || GET_CODE (original_target) == REG
7631 || TREE_ADDRESSABLE (type))
7632 #endif
7633 && ! (GET_CODE (original_target) == MEM
7634 && MEM_VOLATILE_P (original_target)))
7635 temp = original_target;
7636 else if (TREE_ADDRESSABLE (type))
7637 abort ();
7638 else
7639 temp = assign_temp (type, 0, 0, 1);
7641 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7642 do the test of X as a store-flag operation, do this as
7643 A + ((X != 0) << log C). Similarly for other simple binary
7644 operators. Only do for C == 1 if BRANCH_COST is low. */
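/* Concretely, x ? a + 4 : a might become a + ((x != 0) << 2), since
   log2 of the constant 4 is 2 (x and a are illustrative names).  */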
7645 if (temp && singleton && binary_op
7646 && (TREE_CODE (binary_op) == PLUS_EXPR
7647 || TREE_CODE (binary_op) == MINUS_EXPR
7648 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7649 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7650 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7651 : integer_onep (TREE_OPERAND (binary_op, 1)))
7652 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7654 rtx result;
7655 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7656 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7657 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7658 : xor_optab);
7660 /* If we had X ? A : A + 1, do this as A + (X == 0).
7662 We have to invert the truth value here and then put it
7663 back later if do_store_flag fails. We cannot simply copy
7664 TREE_OPERAND (exp, 0) to another variable and modify that
7665 because invert_truthvalue can modify the tree pointed to
7666 by its argument. */
7667 if (singleton == TREE_OPERAND (exp, 1))
7668 TREE_OPERAND (exp, 0)
7669 = invert_truthvalue (TREE_OPERAND (exp, 0));
7671 result = do_store_flag (TREE_OPERAND (exp, 0),
7672 (safe_from_p (temp, singleton, 1)
7673 ? temp : NULL_RTX),
7674 mode, BRANCH_COST <= 1);
7676 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7677 result = expand_shift (LSHIFT_EXPR, mode, result,
7678 build_int_2 (tree_log2
7679 (TREE_OPERAND
7680 (binary_op, 1)),
7682 (safe_from_p (temp, singleton, 1)
7683 ? temp : NULL_RTX), 0);
7685 if (result)
7687 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7688 return expand_binop (mode, boptab, op1, result, temp,
7689 unsignedp, OPTAB_LIB_WIDEN);
7691 else if (singleton == TREE_OPERAND (exp, 1))
7692 TREE_OPERAND (exp, 0)
7693 = invert_truthvalue (TREE_OPERAND (exp, 0));
7696 do_pending_stack_adjust ();
7697 NO_DEFER_POP;
7698 op0 = gen_label_rtx ();
7700 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7702 if (temp != 0)
7704 /* If the target conflicts with the other operand of the
7705 binary op, we can't use it. Also, we can't use the target
7706 if it is a hard register, because evaluating the condition
7707 might clobber it. */
7708 if ((binary_op
7709 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7710 || (GET_CODE (temp) == REG
7711 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7712 temp = gen_reg_rtx (mode);
7713 store_expr (singleton, temp, 0);
7715 else
7716 expand_expr (singleton,
7717 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7718 if (singleton == TREE_OPERAND (exp, 1))
7719 jumpif (TREE_OPERAND (exp, 0), op0);
7720 else
7721 jumpifnot (TREE_OPERAND (exp, 0), op0);
7723 start_cleanup_deferral ();
7724 if (binary_op && temp == 0)
7725 /* Just touch the other operand. */
7726 expand_expr (TREE_OPERAND (binary_op, 1),
7727 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7728 else if (binary_op)
7729 store_expr (build (TREE_CODE (binary_op), type,
7730 make_tree (type, temp),
7731 TREE_OPERAND (binary_op, 1)),
7732 temp, 0);
7733 else
7734 store_expr (build1 (TREE_CODE (unary_op), type,
7735 make_tree (type, temp)),
7736 temp, 0);
7737 op1 = op0;
7739 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7740 comparison operator. If we have one of these cases, set the
7741 output to A, branch on A (cse will merge these two references),
7742 then set the output to FOO. */
7743 else if (temp
7744 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7745 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7746 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7747 TREE_OPERAND (exp, 1), 0)
7748 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7749 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7750 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7752 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7753 temp = gen_reg_rtx (mode);
7754 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7755 jumpif (TREE_OPERAND (exp, 0), op0);
7757 start_cleanup_deferral ();
7758 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7759 op1 = op0;
7761 else if (temp
7762 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7763 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7764 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7765 TREE_OPERAND (exp, 2), 0)
7766 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7767 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7768 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7770 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7771 temp = gen_reg_rtx (mode);
7772 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7773 jumpifnot (TREE_OPERAND (exp, 0), op0);
7775 start_cleanup_deferral ();
7776 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7777 op1 = op0;
7779 else
7781 op1 = gen_label_rtx ();
7782 jumpifnot (TREE_OPERAND (exp, 0), op0);
7784 start_cleanup_deferral ();
7786 /* One branch of the cond can be void, if it never returns. For
7787 example A ? throw : E */
7788 if (temp != 0
7789 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7790 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7791 else
7792 expand_expr (TREE_OPERAND (exp, 1),
7793 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7794 end_cleanup_deferral ();
7795 emit_queue ();
7796 emit_jump_insn (gen_jump (op1));
7797 emit_barrier ();
7798 emit_label (op0);
7799 start_cleanup_deferral ();
7800 if (temp != 0
7801 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7802 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7803 else
7804 expand_expr (TREE_OPERAND (exp, 2),
7805 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7808 end_cleanup_deferral ();
7810 emit_queue ();
7811 emit_label (op1);
7812 OK_DEFER_POP;
7814 return temp;
7817 case TARGET_EXPR:
7819 /* Something needs to be initialized, but we didn't know
7820 where that thing was when building the tree. For example,
7821 it could be the return value of a function, or a parameter
7822 to a function which is laid down in the stack, or a temporary
7823 variable which must be passed by reference.
7825 We guarantee that the expression will either be constructed
7826 or copied into our original target. */
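/* A typical instance is an initialization such as `struct S s = f ();'
   in a front end that represents it as a TARGET_EXPR; the slot then
   stands for `s' or for a compiler temporary.  */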
7828 tree slot = TREE_OPERAND (exp, 0);
7829 tree cleanups = NULL_TREE;
7830 tree exp1;
7832 if (TREE_CODE (slot) != VAR_DECL)
7833 abort ();
7835 if (! ignore)
7836 target = original_target;
7838 /* Set this here so that if we get a target that refers to a
7839 register variable that's already been used, put_reg_into_stack
7840 knows that it should fix up those uses. */
7841 TREE_USED (slot) = 1;
7843 if (target == 0)
7845 if (DECL_RTL (slot) != 0)
7847 target = DECL_RTL (slot);
7848 /* If we have already expanded the slot, don't do
7849 it again. (mrs) */
7850 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7851 return target;
7853 else
7855 target = assign_temp (type, 2, 0, 1);
7856 /* All temp slots at this level must not conflict. */
7857 preserve_temp_slots (target);
7858 DECL_RTL (slot) = target;
7859 if (TREE_ADDRESSABLE (slot))
7861 TREE_ADDRESSABLE (slot) = 0;
7862 mark_addressable (slot);
7865 /* Since SLOT is not known to the called function
7866 to belong to its stack frame, we must build an explicit
7867 cleanup. This case occurs when we must build up a reference
7868 to pass the reference as an argument. In this case,
7869 it is very likely that such a reference need not be
7870 built here. */
7872 if (TREE_OPERAND (exp, 2) == 0)
7873 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
7874 cleanups = TREE_OPERAND (exp, 2);
7877 else
7879 /* This case does occur when expanding a parameter which
7880 needs to be constructed on the stack. The target
7881 is the actual stack address that we want to initialize.
7882 The function we call will perform the cleanup in this case. */
7884 /* If we have already assigned it space, use that space,
7885 not the target that we were passed in, as our target
7886 parameter is only a hint. */
7887 if (DECL_RTL (slot) != 0)
7889 target = DECL_RTL (slot);
7890 /* If we have already expanded the slot, don't do
7891 it again. (mrs) */
7892 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7893 return target;
7895 else
7897 DECL_RTL (slot) = target;
7898 /* If we must have an addressable slot, then make sure that
7899 the RTL that we just stored in slot is OK. */
7900 if (TREE_ADDRESSABLE (slot))
7902 TREE_ADDRESSABLE (slot) = 0;
7903 mark_addressable (slot);
7908 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7909 /* Mark it as expanded. */
7910 TREE_OPERAND (exp, 1) = NULL_TREE;
7912 store_expr (exp1, target, 0);
7914 expand_decl_cleanup (NULL_TREE, cleanups);
7916 return target;
7919 case INIT_EXPR:
7921 tree lhs = TREE_OPERAND (exp, 0);
7922 tree rhs = TREE_OPERAND (exp, 1);
7923 tree noncopied_parts = 0;
7924 tree lhs_type = TREE_TYPE (lhs);
7926 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7927 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7928 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7929 TYPE_NONCOPIED_PARTS (lhs_type));
7930 while (noncopied_parts != 0)
7932 expand_assignment (TREE_VALUE (noncopied_parts),
7933 TREE_PURPOSE (noncopied_parts), 0, 0);
7934 noncopied_parts = TREE_CHAIN (noncopied_parts);
7936 return temp;
7939 case MODIFY_EXPR:
7941 /* If lhs is complex, expand calls in rhs before computing it.
7942 That's so we don't compute a pointer and save it over a call.
7943 If lhs is simple, compute it first so we can give it as a
7944 target if the rhs is just a call. This avoids an extra temp and copy
7945 and that prevents a partial-subsumption which makes bad code.
7946 Actually we could treat component_ref's of vars like vars. */
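/* E.g. for `v = f (x)' the simple lhs `v' is computed first so the call
   can store straight into it, whereas for an lhs like `p->a[i]' the
   calls in the rhs are pre-expanded so that no pointer needs to stay
   live across them (these names are only illustrative).  */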
7948 tree lhs = TREE_OPERAND (exp, 0);
7949 tree rhs = TREE_OPERAND (exp, 1);
7950 tree noncopied_parts = 0;
7951 tree lhs_type = TREE_TYPE (lhs);
7953 temp = 0;
7955 if (TREE_CODE (lhs) != VAR_DECL
7956 && TREE_CODE (lhs) != RESULT_DECL
7957 && TREE_CODE (lhs) != PARM_DECL
7958 && ! (TREE_CODE (lhs) == INDIRECT_REF
7959 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7960 preexpand_calls (exp);
7962 /* Check for |= or &= of a bitfield of size one into another bitfield
7963 of size 1. In this case, (unless we need the result of the
7964 assignment) we can do this more efficiently with a
7965 test followed by an assignment, if necessary.
7967 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7968 things change so we do, this code should be enhanced to
7969 support it. */
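/* E.g. with one-bit fields, `s.a |= s.b' can be done by jumping past
   the store when s.b is zero and storing 1 into s.a otherwise, instead
   of reading, or-ing and rewriting s.a (s.a and s.b are illustrative
   names).  */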
7970 if (ignore
7971 && TREE_CODE (lhs) == COMPONENT_REF
7972 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7973 || TREE_CODE (rhs) == BIT_AND_EXPR)
7974 && TREE_OPERAND (rhs, 0) == lhs
7975 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7976 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7977 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7979 rtx label = gen_label_rtx ();
7981 do_jump (TREE_OPERAND (rhs, 1),
7982 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7983 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7984 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7985 (TREE_CODE (rhs) == BIT_IOR_EXPR
7986 ? integer_one_node
7987 : integer_zero_node)),
7988 0, 0);
7989 do_pending_stack_adjust ();
7990 emit_label (label);
7991 return const0_rtx;
7994 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7995 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7996 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7997 TYPE_NONCOPIED_PARTS (lhs_type));
7999 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8000 while (noncopied_parts != 0)
8002 expand_assignment (TREE_PURPOSE (noncopied_parts),
8003 TREE_VALUE (noncopied_parts), 0, 0);
8004 noncopied_parts = TREE_CHAIN (noncopied_parts);
8006 return temp;
8009 case RETURN_EXPR:
8010 if (!TREE_OPERAND (exp, 0))
8011 expand_null_return ();
8012 else
8013 expand_return (TREE_OPERAND (exp, 0));
8014 return const0_rtx;
8016 case PREINCREMENT_EXPR:
8017 case PREDECREMENT_EXPR:
8018 return expand_increment (exp, 0, ignore);
8020 case POSTINCREMENT_EXPR:
8021 case POSTDECREMENT_EXPR:
8022 /* Faster to treat as pre-increment if result is not used. */
8023 return expand_increment (exp, ! ignore, ignore);
8025 case ADDR_EXPR:
8026 /* If nonzero, TEMP will be set to the address of something that might
8027 be a MEM corresponding to a stack slot. */
8028 temp = 0;
8030 /* Are we taking the address of a nested function? */
8031 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8032 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8033 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8034 && ! TREE_STATIC (exp))
8036 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8037 op0 = force_operand (op0, target);
8039 /* If we are taking the address of something erroneous, just
8040 return a zero. */
8041 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8042 return const0_rtx;
8043 else
8045 /* We make sure to pass const0_rtx down if we came in with
8046 ignore set, to avoid doing the cleanups twice for something. */
8047 op0 = expand_expr (TREE_OPERAND (exp, 0),
8048 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8049 (modifier == EXPAND_INITIALIZER
8050 ? modifier : EXPAND_CONST_ADDRESS));
8052 /* If we are going to ignore the result, OP0 will have been set
8053 to const0_rtx, so just return it. Don't get confused and
8054 think we are taking the address of the constant. */
8055 if (ignore)
8056 return op0;
8058 op0 = protect_from_queue (op0, 0);
8060 /* We would like the object in memory. If it is a constant, we can
8061 have it be statically allocated into memory. For a non-constant,
8062 we need to allocate some memory and store the value into it. */
8064 if (CONSTANT_P (op0))
8065 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8066 op0);
8067 else if (GET_CODE (op0) == MEM)
8069 mark_temp_addr_taken (op0);
8070 temp = XEXP (op0, 0);
8073 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8074 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8076 /* If this object is in a register, it must not
8077 be BLKmode. */
8078 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8079 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8081 mark_temp_addr_taken (memloc);
8082 emit_move_insn (memloc, op0);
8083 op0 = memloc;
8086 if (GET_CODE (op0) != MEM)
8087 abort ();
8089 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8091 temp = XEXP (op0, 0);
8092 #ifdef POINTERS_EXTEND_UNSIGNED
8093 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8094 && mode == ptr_mode)
8095 temp = convert_memory_address (ptr_mode, temp);
8096 #endif
8097 return temp;
8100 op0 = force_operand (XEXP (op0, 0), target);
8103 if (flag_force_addr && GET_CODE (op0) != REG)
8104 op0 = force_reg (Pmode, op0);
8106 if (GET_CODE (op0) == REG
8107 && ! REG_USERVAR_P (op0))
8108 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
8110 /* If we might have had a temp slot, add an equivalent address
8111 for it. */
8112 if (temp != 0)
8113 update_temp_slot_address (temp, op0);
8115 #ifdef POINTERS_EXTEND_UNSIGNED
8116 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8117 && mode == ptr_mode)
8118 op0 = convert_memory_address (ptr_mode, op0);
8119 #endif
8121 return op0;
8123 case ENTRY_VALUE_EXPR:
8124 abort ();
8126 /* COMPLEX type for Extended Pascal & Fortran */
8127 case COMPLEX_EXPR:
8129 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8130 rtx insns;
8132 /* Get the rtx code of the operands. */
8133 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8134 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8136 if (! target)
8137 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8139 start_sequence ();
8141 /* Move the real (op0) and imaginary (op1) parts to their location. */
8142 emit_move_insn (gen_realpart (mode, target), op0);
8143 emit_move_insn (gen_imagpart (mode, target), op1);
8145 insns = get_insns ();
8146 end_sequence ();
8148 /* Complex construction should appear as a single unit. */
8149 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8150 each with a separate pseudo as destination.
8151 It's not correct for flow to treat them as a unit. */
8152 if (GET_CODE (target) != CONCAT)
8153 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8154 else
8155 emit_insns (insns);
8157 return target;
8160 case REALPART_EXPR:
8161 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8162 return gen_realpart (mode, op0);
8164 case IMAGPART_EXPR:
8165 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8166 return gen_imagpart (mode, op0);
8168 case CONJ_EXPR:
8170 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8171 rtx imag_t;
8172 rtx insns;
8174 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8176 if (! target)
8177 target = gen_reg_rtx (mode);
8179 start_sequence ();
8181 /* Store the realpart and the negated imagpart to target. */
8182 emit_move_insn (gen_realpart (partmode, target),
8183 gen_realpart (partmode, op0));
8185 imag_t = gen_imagpart (partmode, target);
8186 temp = expand_unop (partmode, neg_optab,
8187 gen_imagpart (partmode, op0), imag_t, 0);
8188 if (temp != imag_t)
8189 emit_move_insn (imag_t, temp);
8191 insns = get_insns ();
8192 end_sequence ();
8194 /* Conjugate should appear as a single unit.
8195 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8196 each with a separate pseudo as destination.
8197 It's not correct for flow to treat them as a unit. */
8198 if (GET_CODE (target) != CONCAT)
8199 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8200 else
8201 emit_insns (insns);
8203 return target;
8206 case TRY_CATCH_EXPR:
8208 tree handler = TREE_OPERAND (exp, 1);
8210 expand_eh_region_start ();
8212 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8214 expand_eh_region_end (handler);
8216 return op0;
8219 case TRY_FINALLY_EXPR:
8221 tree try_block = TREE_OPERAND (exp, 0);
8222 tree finally_block = TREE_OPERAND (exp, 1);
8223 rtx finally_label = gen_label_rtx ();
8224 rtx done_label = gen_label_rtx ();
8225 rtx return_link = gen_reg_rtx (Pmode);
8226 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8227 (tree) finally_label, (tree) return_link);
8228 TREE_SIDE_EFFECTS (cleanup) = 1;
8230 /* Start a new binding layer that will keep track of all cleanup
8231 actions to be performed. */
8232 expand_start_bindings (2);
8234 target_temp_slot_level = temp_slot_level;
8236 expand_decl_cleanup (NULL_TREE, cleanup);
8237 op0 = expand_expr (try_block, target, tmode, modifier);
8239 preserve_temp_slots (op0);
8240 expand_end_bindings (NULL_TREE, 0, 0);
8241 emit_jump (done_label);
8242 emit_label (finally_label);
8243 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8244 emit_indirect_jump (return_link);
8245 emit_label (done_label);
8246 return op0;
8249 case GOTO_SUBROUTINE_EXPR:
8251 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8252 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8253 rtx return_address = gen_label_rtx ();
8254 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8255 emit_jump (subr);
8256 emit_label (return_address);
8257 return const0_rtx;
8260 case POPDCC_EXPR:
8262 rtx dcc = get_dynamic_cleanup_chain ();
8263 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8264 return const0_rtx;
8267 case POPDHC_EXPR:
8269 rtx dhc = get_dynamic_handler_chain ();
8270 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8271 return const0_rtx;
8274 case VA_ARG_EXPR:
8275 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8277 default:
8278 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8281 /* Here to do an ordinary binary operator, generating an instruction
8282 from the optab already placed in `this_optab'. */
8283 binop:
8284 preexpand_calls (exp);
8285 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8286 subtarget = 0;
8287 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8288 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8289 binop2:
8290 temp = expand_binop (mode, this_optab, op0, op1, target,
8291 unsignedp, OPTAB_LIB_WIDEN);
8292 if (temp == 0)
8293 abort ();
8294 return temp;
8297 /* Return the tree node and offset if a given argument corresponds to
8298 a string constant. */
8300 tree
8301 string_constant (arg, ptr_offset)
8302 tree arg;
8303 tree *ptr_offset;
8305 STRIP_NOPS (arg);
8307 if (TREE_CODE (arg) == ADDR_EXPR
8308 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8310 *ptr_offset = integer_zero_node;
8311 return TREE_OPERAND (arg, 0);
8313 else if (TREE_CODE (arg) == PLUS_EXPR)
8315 tree arg0 = TREE_OPERAND (arg, 0);
8316 tree arg1 = TREE_OPERAND (arg, 1);
8318 STRIP_NOPS (arg0);
8319 STRIP_NOPS (arg1);
8321 if (TREE_CODE (arg0) == ADDR_EXPR
8322 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8324 *ptr_offset = arg1;
8325 return TREE_OPERAND (arg0, 0);
8327 else if (TREE_CODE (arg1) == ADDR_EXPR
8328 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8330 *ptr_offset = arg0;
8331 return TREE_OPERAND (arg1, 0);
8335 return 0;
8338 /* Expand code for a post- or pre- increment or decrement
8339 and return the RTX for the result.
8340 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8342 static rtx
8343 expand_increment (exp, post, ignore)
8344 register tree exp;
8345 int post, ignore;
8347 register rtx op0, op1;
8348 register rtx temp, value;
8349 register tree incremented = TREE_OPERAND (exp, 0);
8350 optab this_optab = add_optab;
8351 int icode;
8352 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8353 int op0_is_copy = 0;
8354 int single_insn = 0;
8355 /* 1 means we can't store into OP0 directly,
8356 because it is a subreg narrower than a word,
8357 and we don't dare clobber the rest of the word. */
8358 int bad_subreg = 0;
8360 /* Stabilize any component ref that might need to be
8361 evaluated more than once below. */
8362 if (!post
8363 || TREE_CODE (incremented) == BIT_FIELD_REF
8364 || (TREE_CODE (incremented) == COMPONENT_REF
8365 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8366 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8367 incremented = stabilize_reference (incremented);
8368 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8369 ones into save exprs so that they don't accidentally get evaluated
8370 more than once by the code below. */
8371 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8372 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8373 incremented = save_expr (incremented);
8375 /* Compute the operands as RTX.
8376 Note whether OP0 is the actual lvalue or a copy of it:
8377 I believe it is a copy iff it is a register or subreg
8378 and insns were generated in computing it. */
8380 temp = get_last_insn ();
8381 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
8383 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8384 in place but instead must do sign- or zero-extension during assignment,
8385 so we copy it into a new register and let the code below use it as
8386 a copy.
8388 Note that we can safely modify this SUBREG since it is known not to be
8389 shared (it was made by the expand_expr call above). */
8391 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8393 if (post)
8394 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8395 else
8396 bad_subreg = 1;
8398 else if (GET_CODE (op0) == SUBREG
8399 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8401 /* We cannot increment this SUBREG in place. If we are
8402 post-incrementing, get a copy of the old value. Otherwise,
8403 just mark that we cannot increment in place. */
8404 if (post)
8405 op0 = copy_to_reg (op0);
8406 else
8407 bad_subreg = 1;
8410 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8411 && temp != get_last_insn ());
8412 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8413 EXPAND_MEMORY_USE_BAD);
8415 /* Decide whether incrementing or decrementing. */
8416 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8417 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8418 this_optab = sub_optab;
8420 /* Convert decrement by a constant into a negative increment. */
8421 if (this_optab == sub_optab
8422 && GET_CODE (op1) == CONST_INT)
8424 op1 = GEN_INT (- INTVAL (op1));
8425 this_optab = add_optab;
8428 /* For a preincrement, see if we can do this with a single instruction. */
8429 if (!post)
8431 icode = (int) this_optab->handlers[(int) mode].insn_code;
8432 if (icode != (int) CODE_FOR_nothing
8433 /* Make sure that OP0 is valid for operands 0 and 1
8434 of the insn we want to queue. */
8435 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8436 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8437 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8438 single_insn = 1;
8441 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8442 then we cannot just increment OP0. We must therefore contrive to
8443 increment the original value. Then, for postincrement, we can return
8444 OP0 since it is a copy of the old value. For preincrement, expand here
8445 unless we can do it with a single insn.
8447 Likewise if storing directly into OP0 would clobber high bits
8448 we need to preserve (bad_subreg). */
8449 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8451 /* This is the easiest way to increment the value wherever it is.
8452 Problems with multiple evaluation of INCREMENTED are prevented
8453 because either (1) it is a component_ref or preincrement,
8454 in which case it was stabilized above, or (2) it is an array_ref
8455 with constant index in an array in a register, which is
8456 safe to reevaluate. */
8457 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8458 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8459 ? MINUS_EXPR : PLUS_EXPR),
8460 TREE_TYPE (exp),
8461 incremented,
8462 TREE_OPERAND (exp, 1));
8464 while (TREE_CODE (incremented) == NOP_EXPR
8465 || TREE_CODE (incremented) == CONVERT_EXPR)
8467 newexp = convert (TREE_TYPE (incremented), newexp);
8468 incremented = TREE_OPERAND (incremented, 0);
8471 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8472 return post ? op0 : temp;
8475 if (post)
8477 /* We have a true reference to the value in OP0.
8478 If there is an insn to add or subtract in this mode, queue it.
8479 Queueing the increment insn avoids the register shuffling
8480 that often results if we must increment now and first save
8481 the old value for subsequent use. */
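/* E.g. in `a[i++] = x' the add to `i' can be queued, so the old value
   of `i' is still available for addressing the array element; the
   queued add is emitted later by emit_queue.  */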
8483 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8484 op0 = stabilize (op0);
8485 #endif
8487 icode = (int) this_optab->handlers[(int) mode].insn_code;
8488 if (icode != (int) CODE_FOR_nothing
8489 /* Make sure that OP0 is valid for operands 0 and 1
8490 of the insn we want to queue. */
8491 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8492 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8494 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8495 op1 = force_reg (mode, op1);
8497 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8499 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8501 rtx addr = (general_operand (XEXP (op0, 0), mode)
8502 ? force_reg (Pmode, XEXP (op0, 0))
8503 : copy_to_reg (XEXP (op0, 0)));
8504 rtx temp, result;
8506 op0 = change_address (op0, VOIDmode, addr);
8507 temp = force_reg (GET_MODE (op0), op0);
8508 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8509 op1 = force_reg (mode, op1);
8511 /* The increment queue is LIFO, thus we have to `queue'
8512 the instructions in reverse order. */
8513 enqueue_insn (op0, gen_move_insn (op0, temp));
8514 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8515 return result;
8519 /* Preincrement, or we can't increment with one simple insn. */
8520 if (post)
8521 /* Save a copy of the value before inc or dec, to return it later. */
8522 temp = value = copy_to_reg (op0);
8523 else
8524 /* Arrange to return the incremented value. */
8525 /* Copy the rtx because expand_binop will protect from the queue,
8526 and the results of that would be invalid for us to return
8527 if our caller does emit_queue before using our result. */
8528 temp = copy_rtx (value = op0);
8530 /* Increment however we can. */
8531 op1 = expand_binop (mode, this_optab, value, op1,
8532 current_function_check_memory_usage ? NULL_RTX : op0,
8533 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8534 /* Make sure the value is stored into OP0. */
8535 if (op1 != op0)
8536 emit_move_insn (op0, op1);
8538 return temp;
8541 /* Expand all function calls contained within EXP, innermost ones first.
8542 But don't look within expressions that have sequence points.
8543 For each CALL_EXPR, record the rtx for its value
8544 in the CALL_EXPR_RTL field. */
8546 static void
8547 preexpand_calls (exp)
8548 tree exp;
8550 register int nops, i;
8551 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8553 if (! do_preexpand_calls)
8554 return;
8556 /* Only expressions and references can contain calls. */
8558 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8559 return;
8561 switch (TREE_CODE (exp))
8563 case CALL_EXPR:
8564 /* Do nothing if already expanded. */
8565 if (CALL_EXPR_RTL (exp) != 0
8566 /* Do nothing if the call returns a variable-sized object. */
8567 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8568 /* Do nothing to built-in functions. */
8569 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
8570 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8571 == FUNCTION_DECL)
8572 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8573 return;
8575 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8576 return;
8578 case COMPOUND_EXPR:
8579 case COND_EXPR:
8580 case TRUTH_ANDIF_EXPR:
8581 case TRUTH_ORIF_EXPR:
8582 /* If we find one of these, then we can be sure
8583 the adjust will be done for it (since it makes jumps).
8584 Do it now, so that if this is inside an argument
8585 of a function, we don't get the stack adjustment
8586 after some other args have already been pushed. */
8587 do_pending_stack_adjust ();
8588 return;
8590 case BLOCK:
8591 case RTL_EXPR:
8592 case WITH_CLEANUP_EXPR:
8593 case CLEANUP_POINT_EXPR:
8594 case TRY_CATCH_EXPR:
8595 return;
8597 case SAVE_EXPR:
8598 if (SAVE_EXPR_RTL (exp) != 0)
8599 return;
8601 default:
8602 break;
8605 nops = tree_code_length[(int) TREE_CODE (exp)];
8606 for (i = 0; i < nops; i++)
8607 if (TREE_OPERAND (exp, i) != 0)
8609 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
8610 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
8611 It doesn't happen before the call is made. */;
8613 else
8615 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8616 if (type == 'e' || type == '<' || type == '1' || type == '2'
8617 || type == 'r')
8618 preexpand_calls (TREE_OPERAND (exp, i));
8623 /* At the start of a function, record that we have no previously-pushed
8624 arguments waiting to be popped. */
8626 void
8627 init_pending_stack_adjust ()
8629 pending_stack_adjust = 0;
8632 /* When exiting from function, if safe, clear out any pending stack adjust
8633 so the adjustment won't get done.
8635 Note, if the current function calls alloca, then it must have a
8636 frame pointer regardless of the value of flag_omit_frame_pointer. */
8638 void
8639 clear_pending_stack_adjust ()
8641 #ifdef EXIT_IGNORE_STACK
8642 if (optimize > 0
8643 && (! flag_omit_frame_pointer || current_function_calls_alloca)
8644 && EXIT_IGNORE_STACK
8645 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8646 && ! flag_inline_functions)
8647 pending_stack_adjust = 0;
8648 #endif
8651 /* Pop any previously-pushed arguments that have not been popped yet. */
8653 void
8654 do_pending_stack_adjust ()
8656 if (inhibit_defer_pop == 0)
8658 if (pending_stack_adjust != 0)
8659 adjust_stack (GEN_INT (pending_stack_adjust));
8660 pending_stack_adjust = 0;
8664 /* Expand conditional expressions. */
8666 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8667 LABEL is an rtx of code CODE_LABEL, in this function and all the
8668 functions here. */
8670 void
8671 jumpifnot (exp, label)
8672 tree exp;
8673 rtx label;
8675 do_jump (exp, label, NULL_RTX);
8678 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8680 void
8681 jumpif (exp, label)
8682 tree exp;
8683 rtx label;
8685 do_jump (exp, NULL_RTX, label);
8688 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8689 the result is zero, or IF_TRUE_LABEL if the result is one.
8690 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8691 meaning fall through in that case.
8693 do_jump always does any pending stack adjust except when it does not
8694 actually perform a jump. An example where there is no jump
8695 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8697 This function is responsible for optimizing cases such as
8698 &&, || and comparison operators in EXP. */
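/* For instance, for `if (a != 0 && b != 0)' do_jump can branch on the
   two comparisons directly, without ever materializing the 0-or-1
   value of the && expression.  */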
8700 void
8701 do_jump (exp, if_false_label, if_true_label)
8702 tree exp;
8703 rtx if_false_label, if_true_label;
8705 register enum tree_code code = TREE_CODE (exp);
8706 /* Some cases need to create a label to jump to
8707 in order to properly fall through.
8708 These cases set DROP_THROUGH_LABEL nonzero. */
8709 rtx drop_through_label = 0;
8710 rtx temp;
8711 int i;
8712 tree type;
8713 enum machine_mode mode;
8715 #ifdef MAX_INTEGER_COMPUTATION_MODE
8716 check_max_integer_computation_mode (exp);
8717 #endif
8719 emit_queue ();
8721 switch (code)
8723 case ERROR_MARK:
8724 break;
8726 case INTEGER_CST:
8727 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8728 if (temp)
8729 emit_jump (temp);
8730 break;
8732 #if 0
8733 /* This is not true with #pragma weak */
8734 case ADDR_EXPR:
8735 /* The address of something can never be zero. */
8736 if (if_true_label)
8737 emit_jump (if_true_label);
8738 break;
8739 #endif
8741 case NOP_EXPR:
8742 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8743 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8744 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8745 goto normal;
8746 case CONVERT_EXPR:
8747 /* If we are narrowing the operand, we have to do the compare in the
8748 narrower mode. */
8749 if ((TYPE_PRECISION (TREE_TYPE (exp))
8750 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8751 goto normal;
8752 case NON_LVALUE_EXPR:
8753 case REFERENCE_EXPR:
8754 case ABS_EXPR:
8755 case NEGATE_EXPR:
8756 case LROTATE_EXPR:
8757 case RROTATE_EXPR:
8758 /* These cannot change zero->non-zero or vice versa. */
8759 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8760 break;
8762 #if 0
8763 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
8764 a test and can be longer if the test is eliminated. */
8765 case PLUS_EXPR:
8766 /* Reduce to minus. */
8767 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8768 TREE_OPERAND (exp, 0),
8769 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8770 TREE_OPERAND (exp, 1))));
8771 /* Process as MINUS. */
8772 #endif
8774 case MINUS_EXPR:
8775 /* Non-zero iff operands of minus differ. */
8776 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
8777 TREE_OPERAND (exp, 0),
8778 TREE_OPERAND (exp, 1)),
8779 NE, NE, if_false_label, if_true_label);
8780 break;
8782 case BIT_AND_EXPR:
8783 /* If we are AND'ing with a small constant, do this comparison in the
8784 smallest type that fits. If the machine doesn't have comparisons
8785 that small, it will be converted back to the wider comparison.
8786 This helps if we are testing the sign bit of a narrower object.
8787 combine can't do this for us because it can't know whether a
8788 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
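/* E.g. `if (x & 0x80)' with x an `int' might be done as a QImode test
   when a QImode compare exists and byte access is not slow, since only
   the low byte of x matters here.  */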
8790 if (! SLOW_BYTE_ACCESS
8791 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8792 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8793 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8794 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8795 && (type = type_for_mode (mode, 1)) != 0
8796 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8797 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8798 != CODE_FOR_nothing))
8800 do_jump (convert (type, exp), if_false_label, if_true_label);
8801 break;
8803 goto normal;
8805 case TRUTH_NOT_EXPR:
8806 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8807 break;
8809 case TRUTH_ANDIF_EXPR:
8810 if (if_false_label == 0)
8811 if_false_label = drop_through_label = gen_label_rtx ();
8812 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8813 start_cleanup_deferral ();
8814 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8815 end_cleanup_deferral ();
8816 break;
8818 case TRUTH_ORIF_EXPR:
8819 if (if_true_label == 0)
8820 if_true_label = drop_through_label = gen_label_rtx ();
8821 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8822 start_cleanup_deferral ();
8823 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8824 end_cleanup_deferral ();
8825 break;
8827 case COMPOUND_EXPR:
8828 push_temp_slots ();
8829 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8830 preserve_temp_slots (NULL_RTX);
8831 free_temp_slots ();
8832 pop_temp_slots ();
8833 emit_queue ();
8834 do_pending_stack_adjust ();
8835 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8836 break;
8838 case COMPONENT_REF:
8839 case BIT_FIELD_REF:
8840 case ARRAY_REF:
8842 int bitsize, bitpos, unsignedp;
8843 enum machine_mode mode;
8844 tree type;
8845 tree offset;
8846 int volatilep = 0;
8847 int alignment;
8849 /* Get description of this reference. We don't actually care
8850 about the underlying object here. */
8851 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8852 &mode, &unsignedp, &volatilep,
8853 &alignment);
8855 type = type_for_size (bitsize, unsignedp);
8856 if (! SLOW_BYTE_ACCESS
8857 && type != 0 && bitsize >= 0
8858 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8859 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8860 != CODE_FOR_nothing))
8862 do_jump (convert (type, exp), if_false_label, if_true_label);
8863 break;
8865 goto normal;
8868 case COND_EXPR:
8869 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8870 if (integer_onep (TREE_OPERAND (exp, 1))
8871 && integer_zerop (TREE_OPERAND (exp, 2)))
8872 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8874 else if (integer_zerop (TREE_OPERAND (exp, 1))
8875 && integer_onep (TREE_OPERAND (exp, 2)))
8876 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8878 else
8880 register rtx label1 = gen_label_rtx ();
8881 drop_through_label = gen_label_rtx ();
8883 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8885 start_cleanup_deferral ();
8886 /* Now the THEN-expression. */
8887 do_jump (TREE_OPERAND (exp, 1),
8888 if_false_label ? if_false_label : drop_through_label,
8889 if_true_label ? if_true_label : drop_through_label);
8890 /* In case the do_jump just above never jumps. */
8891 do_pending_stack_adjust ();
8892 emit_label (label1);
8894 /* Now the ELSE-expression. */
8895 do_jump (TREE_OPERAND (exp, 2),
8896 if_false_label ? if_false_label : drop_through_label,
8897 if_true_label ? if_true_label : drop_through_label);
8898 end_cleanup_deferral ();
8900 break;
8902 case EQ_EXPR:
8904 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8906 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8907 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8909 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8910 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8911 do_jump
8912 (fold
8913 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
8914 fold (build (EQ_EXPR, TREE_TYPE (exp),
8915 fold (build1 (REALPART_EXPR,
8916 TREE_TYPE (inner_type),
8917 exp0)),
8918 fold (build1 (REALPART_EXPR,
8919 TREE_TYPE (inner_type),
8920 exp1)))),
8921 fold (build (EQ_EXPR, TREE_TYPE (exp),
8922 fold (build1 (IMAGPART_EXPR,
8923 TREE_TYPE (inner_type),
8924 exp0)),
8925 fold (build1 (IMAGPART_EXPR,
8926 TREE_TYPE (inner_type),
8927 exp1)))))),
8928 if_false_label, if_true_label);
8931 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8932 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8934 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8935 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
8936 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8937 else
8938 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
8939 break;
8942 case NE_EXPR:
8944 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8946 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8947 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8949 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8950 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8951 do_jump
8952 (fold
8953 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
8954 fold (build (NE_EXPR, TREE_TYPE (exp),
8955 fold (build1 (REALPART_EXPR,
8956 TREE_TYPE (inner_type),
8957 exp0)),
8958 fold (build1 (REALPART_EXPR,
8959 TREE_TYPE (inner_type),
8960 exp1)))),
8961 fold (build (NE_EXPR, TREE_TYPE (exp),
8962 fold (build1 (IMAGPART_EXPR,
8963 TREE_TYPE (inner_type),
8964 exp0)),
8965 fold (build1 (IMAGPART_EXPR,
8966 TREE_TYPE (inner_type),
8967 exp1)))))),
8968 if_false_label, if_true_label);
8971 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8972 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8974 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8975 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
8976 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8977 else
8978 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
8979 break;
8982 case LT_EXPR:
8983 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8984 if (GET_MODE_CLASS (mode) == MODE_INT
8985 && ! can_compare_p (mode, ccp_jump))
8986 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8987 else
8988 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
8989 break;
8991 case LE_EXPR:
8992 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8993 if (GET_MODE_CLASS (mode) == MODE_INT
8994 && ! can_compare_p (mode, ccp_jump))
8995 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8996 else
8997 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
8998 break;
9000 case GT_EXPR:
9001 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9002 if (GET_MODE_CLASS (mode) == MODE_INT
9003 && ! can_compare_p (mode, ccp_jump))
9004 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9005 else
9006 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9007 break;
9009 case GE_EXPR:
9010 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9011 if (GET_MODE_CLASS (mode) == MODE_INT
9012 && ! can_compare_p (mode, ccp_jump))
9013 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9014 else
9015 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9016 break;
9018 default:
9019 normal:
9020 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9021 #if 0
9022 /* This is not needed any more and causes poor code since it causes
9023 comparisons and tests from non-SI objects to have different code
9024 sequences. */
9025 /* Copy to register to avoid generating bad insns by cse
9026 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9027 if (!cse_not_expected && GET_CODE (temp) == MEM)
9028 temp = copy_to_reg (temp);
9029 #endif
9030 do_pending_stack_adjust ();
9031 /* Do any postincrements in the expression that was tested. */
9032 emit_queue ();
9034 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9036 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9037 if (target)
9038 emit_jump (target);
9040 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9041 && ! can_compare_p (GET_MODE (temp), ccp_jump))
9042 /* Note swapping the labels gives us not-equal. */
9043 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9044 else if (GET_MODE (temp) != VOIDmode)
9045 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9046 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9047 GET_MODE (temp), NULL_RTX, 0,
9048 if_false_label, if_true_label);
9049 else
9050 abort ();
9053 if (drop_through_label)
9055 /* If do_jump produces code that might be jumped around,
9056 do any stack adjusts from that code, before the place
9057 where control merges in. */
9058 do_pending_stack_adjust ();
9059 emit_label (drop_through_label);
9063 /* Given a comparison expression EXP for values too wide to be compared
9064 with one insn, test the comparison and jump to the appropriate label.
9065 The code of EXP is ignored; we always test GT if SWAP is 0,
9066 and LT if SWAP is 1. */
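/* For example, the LT_EXPR case above calls this with SWAP == 1: the two
   operands are exchanged, so the GT test performed by
   do_jump_by_parts_greater_rtx has the effect of the original less-than
   comparison.  GE_EXPR likewise passes SWAP == 1, but with the labels
   exchanged as well.  */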
9068 static void
9069 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9070 tree exp;
9071 int swap;
9072 rtx if_false_label, if_true_label;
9074 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9075 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9076 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9077 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9079 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9082 /* Compare OP0 with OP1, word at a time, in mode MODE.
9083 UNSIGNEDP says to do unsigned comparison.
9084 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
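/* For a two-word value this emits, roughly:

	if (op0_high > op1_high)  goto if_true_label;   (signedness as requested)
	if (op0_high != op1_high) goto if_false_label;
	if (op0_low > op1_low)    goto if_true_label;   (always unsigned)
	goto if_false_label;

   Only the most significant word is compared with the caller's signedness;
   all lower words are compared unsigned, as noted in the loop below.  */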
9086 void
9087 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9088 enum machine_mode mode;
9089 int unsignedp;
9090 rtx op0, op1;
9091 rtx if_false_label, if_true_label;
9093 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9094 rtx drop_through_label = 0;
9095 int i;
9097 if (! if_true_label || ! if_false_label)
9098 drop_through_label = gen_label_rtx ();
9099 if (! if_true_label)
9100 if_true_label = drop_through_label;
9101 if (! if_false_label)
9102 if_false_label = drop_through_label;
9104 /* Compare a word at a time, high order first. */
9105 for (i = 0; i < nwords; i++)
9107 rtx op0_word, op1_word;
9109 if (WORDS_BIG_ENDIAN)
9111 op0_word = operand_subword_force (op0, i, mode);
9112 op1_word = operand_subword_force (op1, i, mode);
9114 else
9116 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9117 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9120 /* All but high-order word must be compared as unsigned. */
9121 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9122 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9123 NULL_RTX, if_true_label);
9125 /* Consider lower words only if these are equal. */
9126 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9127 NULL_RTX, 0, NULL_RTX, if_false_label);
9130 if (if_false_label)
9131 emit_jump (if_false_label);
9132 if (drop_through_label)
9133 emit_label (drop_through_label);
9136 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9137 with one insn, test the comparison and jump to the appropriate label. */
9139 static void
9140 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9141 tree exp;
9142 rtx if_false_label, if_true_label;
9144 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9145 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9146 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9147 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9148 int i;
9149 rtx drop_through_label = 0;
9151 if (! if_false_label)
9152 drop_through_label = if_false_label = gen_label_rtx ();
9154 for (i = 0; i < nwords; i++)
9155 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9156 operand_subword_force (op1, i, mode),
9157 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9158 word_mode, NULL_RTX, 0, if_false_label,
9159 NULL_RTX);
9161 if (if_true_label)
9162 emit_jump (if_true_label);
9163 if (drop_through_label)
9164 emit_label (drop_through_label);
9167 /* Jump according to whether OP0 is 0.
9168 We assume that OP0 has an integer mode that is too wide
9169 for the available compare insns. */
9171 void
9172 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9173 rtx op0;
9174 rtx if_false_label, if_true_label;
9176 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9177 rtx part;
9178 int i;
9179 rtx drop_through_label = 0;
9181 /* The fastest way of doing this comparison on almost any machine is to
9182 "or" all the words and compare the result. If all have to be loaded
9183 from memory and this is a very wide item, it's possible this may
9184 be slower, but that's highly unlikely. */
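/* For a two-word OP0 the fast path below amounts to

	part = op0_word0 | op0_word1;
	if (part == 0) goto if_true_label; else goto if_false_label;

   If expand_binop cannot produce the IOR (it returns 0), the word-by-word
   comparison loop further down is used instead.  */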
9186 part = gen_reg_rtx (word_mode);
9187 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9188 for (i = 1; i < nwords && part != 0; i++)
9189 part = expand_binop (word_mode, ior_optab, part,
9190 operand_subword_force (op0, i, GET_MODE (op0)),
9191 part, 1, OPTAB_WIDEN);
9193 if (part != 0)
9195 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9196 NULL_RTX, 0, if_false_label, if_true_label);
9198 return;
9201 /* If we couldn't do the "or" simply, do this with a series of compares. */
9202 if (! if_false_label)
9203 drop_through_label = if_false_label = gen_label_rtx ();
9205 for (i = 0; i < nwords; i++)
9206 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9207 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9208 if_false_label, NULL_RTX);
9210 if (if_true_label)
9211 emit_jump (if_true_label);
9213 if (drop_through_label)
9214 emit_label (drop_through_label);
9217 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9218 (including code to compute the values to be compared)
9219 and set (CC0) according to the result.
9220 The decision as to signed or unsigned comparison must be made by the caller.
9222 We force a stack adjustment unless there are currently
9223 things pushed on the stack that aren't yet used.
9225 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9226 compared.
9228 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9229 size of MODE should be used. */
9231 rtx
9232 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9233 register rtx op0, op1;
9234 enum rtx_code code;
9235 int unsignedp;
9236 enum machine_mode mode;
9237 rtx size;
9238 int align;
9240 rtx tem;
9242 /* If one operand is constant, make it the second one. Only do this
9243 if the other operand is not constant as well. */
9245 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9246 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9248 tem = op0;
9249 op0 = op1;
9250 op1 = tem;
9251 code = swap_condition (code);
9254 if (flag_force_mem)
9256 op0 = force_not_mem (op0);
9257 op1 = force_not_mem (op1);
9260 do_pending_stack_adjust ();
9262 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9263 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9264 return tem;
9266 #if 0
9267 /* There's no need to do this now that combine.c can eliminate lots of
9268 sign extensions. This can be less efficient in certain cases on other
9269 machines. */
9271 /* If this is a signed equality comparison, we can do it as an
9272 unsigned comparison since zero-extension is cheaper than sign
9273 extension and comparisons with zero are done as unsigned. This is
9274 the case even on machines that can do fast sign extension, since
9275 zero-extension is easier to combine with other operations than
9276 sign-extension is. If we are comparing against a constant, we must
9277 convert it to what it would look like unsigned. */
9278 if ((code == EQ || code == NE) && ! unsignedp
9279 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9281 if (GET_CODE (op1) == CONST_INT
9282 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9283 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9284 unsignedp = 1;
9286 #endif
9288 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9290 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9293 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9294 The decision as to signed or unsigned comparison must be made by the caller.
9296 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9297 compared.
9299 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9300 size of MODE should be used. */
9302 void
9303 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9304 if_false_label, if_true_label)
9305 register rtx op0, op1;
9306 enum rtx_code code;
9307 int unsignedp;
9308 enum machine_mode mode;
9309 rtx size;
9310 int align;
9311 rtx if_false_label, if_true_label;
9313 rtx tem;
9314 int dummy_true_label = 0;
9316 /* Reverse the comparison if that is safe and we want to jump if it is
9317 false. */
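/* For example, an EQ test for which only IF_FALSE_LABEL was supplied becomes
   an NE test that jumps to that label.  This is not done for floating-point
   modes, where reversing a comparison is not safe in the presence of NaNs.  */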
9318 if (! if_true_label && ! FLOAT_MODE_P (mode))
9320 if_true_label = if_false_label;
9321 if_false_label = 0;
9322 code = reverse_condition (code);
9325 /* If one operand is constant, make it the second one. Only do this
9326 if the other operand is not constant as well. */
9328 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9329 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9331 tem = op0;
9332 op0 = op1;
9333 op1 = tem;
9334 code = swap_condition (code);
9337 if (flag_force_mem)
9339 op0 = force_not_mem (op0);
9340 op1 = force_not_mem (op1);
9343 do_pending_stack_adjust ();
9345 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9346 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9348 if (tem == const_true_rtx)
9350 if (if_true_label)
9351 emit_jump (if_true_label);
9353 else
9355 if (if_false_label)
9356 emit_jump (if_false_label);
9358 return;
9361 #if 0
9362 /* There's no need to do this now that combine.c can eliminate lots of
9363 sign extensions. This can be less efficient in certain cases on other
9364 machines. */
9366 /* If this is a signed equality comparison, we can do it as an
9367 unsigned comparison since zero-extension is cheaper than sign
9368 extension and comparisons with zero are done as unsigned. This is
9369 the case even on machines that can do fast sign extension, since
9370 zero-extension is easier to combine with other operations than
9371 sign-extension is. If we are comparing against a constant, we must
9372 convert it to what it would look like unsigned. */
9373 if ((code == EQ || code == NE) && ! unsignedp
9374 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9376 if (GET_CODE (op1) == CONST_INT
9377 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9378 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9379 unsignedp = 1;
9381 #endif
9383 if (! if_true_label)
9385 dummy_true_label = 1;
9386 if_true_label = gen_label_rtx ();
9389 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9390 if_true_label);
9392 if (if_false_label)
9393 emit_jump (if_false_label);
9394 if (dummy_true_label)
9395 emit_label (if_true_label);
9398 /* Generate code for a comparison expression EXP (including code to compute
9399 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9400 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9401 generated code will drop through.
9402 SIGNED_CODE should be the rtx operation for this comparison for
9403 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9405 We force a stack adjustment unless there are currently
9406 things pushed on the stack that aren't yet used. */
9408 static void
9409 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9410 if_true_label)
9411 register tree exp;
9412 enum rtx_code signed_code, unsigned_code;
9413 rtx if_false_label, if_true_label;
9415 register rtx op0, op1;
9416 register tree type;
9417 register enum machine_mode mode;
9418 int unsignedp;
9419 enum rtx_code code;
9421 /* Don't crash if the comparison was erroneous. */
9422 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9423 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9424 return;
9426 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9427 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9428 mode = TYPE_MODE (type);
9429 unsignedp = TREE_UNSIGNED (type);
9430 code = unsignedp ? unsigned_code : signed_code;
9432 #ifdef HAVE_canonicalize_funcptr_for_compare
9433 /* If function pointers need to be "canonicalized" before they can
9434 be reliably compared, then canonicalize them. */
9435 if (HAVE_canonicalize_funcptr_for_compare
9436 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9437 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9438 == FUNCTION_TYPE))
9440 rtx new_op0 = gen_reg_rtx (mode);
9442 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9443 op0 = new_op0;
9446 if (HAVE_canonicalize_funcptr_for_compare
9447 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9448 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9449 == FUNCTION_TYPE))
9451 rtx new_op1 = gen_reg_rtx (mode);
9453 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9454 op1 = new_op1;
9456 #endif
9458 /* Do any postincrements in the expression that was tested. */
9459 emit_queue ();
9461 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9462 ((mode == BLKmode)
9463 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9464 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT,
9465 if_false_label, if_true_label);
9468 /* Generate code to calculate EXP using a store-flag instruction
9469 and return an rtx for the result. EXP is either a comparison
9470 or a TRUTH_NOT_EXPR whose operand is a comparison.
9472 If TARGET is nonzero, store the result there if convenient.
9474 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9475 cheap.
9477 Return zero if there is no suitable set-flag instruction
9478 available on this machine.
9480 Once expand_expr has been called on the arguments of the comparison,
9481 we are committed to doing the store flag, since it is not safe to
9482 re-evaluate the expression. We emit the store-flag insn by calling
9483 emit_store_flag, but only expand the arguments if we have a reason
9484 to believe that emit_store_flag will be successful. If we think that
9485 it will, but it isn't, we have to simulate the store-flag with a
9486 set/jump/set sequence. */
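/* The fallback sequence emitted at the end of this function is, in outline:

	target = 1;
	compare the operands;
	if (condition holds) goto label;
	target = 0;
     label:

   with the two constants exchanged when INVERT is set.  */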
9488 static rtx
9489 do_store_flag (exp, target, mode, only_cheap)
9490 tree exp;
9491 rtx target;
9492 enum machine_mode mode;
9493 int only_cheap;
9495 enum rtx_code code;
9496 tree arg0, arg1, type;
9497 tree tem;
9498 enum machine_mode operand_mode;
9499 int invert = 0;
9500 int unsignedp;
9501 rtx op0, op1;
9502 enum insn_code icode;
9503 rtx subtarget = target;
9504 rtx result, label;
9506 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9507 result at the end. We can't simply invert the test since it would
9508 have already been inverted if it were valid. This case occurs for
9509 some floating-point comparisons. */
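/* For IEEE floating point, !(a < b) is not equivalent to a >= b when either
   operand is a NaN, so the negation cannot be folded into the comparison;
   instead the inner comparison is computed here and its 0/1 result is
   inverted at the end with an XOR.  */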
9511 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9512 invert = 1, exp = TREE_OPERAND (exp, 0);
9514 arg0 = TREE_OPERAND (exp, 0);
9515 arg1 = TREE_OPERAND (exp, 1);
9516 type = TREE_TYPE (arg0);
9517 operand_mode = TYPE_MODE (type);
9518 unsignedp = TREE_UNSIGNED (type);
9520 /* We won't bother with BLKmode store-flag operations because it would mean
9521 passing a lot of information to emit_store_flag. */
9522 if (operand_mode == BLKmode)
9523 return 0;
9525 /* We won't bother with store-flag operations involving function pointers
9526 when function pointers must be canonicalized before comparisons. */
9527 #ifdef HAVE_canonicalize_funcptr_for_compare
9528 if (HAVE_canonicalize_funcptr_for_compare
9529 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9530 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9531 == FUNCTION_TYPE))
9532 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9533 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9534 == FUNCTION_TYPE))))
9535 return 0;
9536 #endif
9538 STRIP_NOPS (arg0);
9539 STRIP_NOPS (arg1);
9541 /* Get the rtx comparison code to use. We know that EXP is a comparison
9542 operation of some type. Some comparisons against 1 and -1 can be
9543 converted to comparisons with zero. Do so here so that the tests
9544 below will be aware that we have a comparison with zero. These
9545 tests will not catch constants in the first operand, but constants
9546 are rarely passed as the first operand. */
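/* For instance, x < 1 becomes x <= 0 and x >= 1 becomes x > 0; for signed
   operands, x <= -1 becomes x < 0 and x > -1 becomes x >= 0.  */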
9548 switch (TREE_CODE (exp))
9550 case EQ_EXPR:
9551 code = EQ;
9552 break;
9553 case NE_EXPR:
9554 code = NE;
9555 break;
9556 case LT_EXPR:
9557 if (integer_onep (arg1))
9558 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9559 else
9560 code = unsignedp ? LTU : LT;
9561 break;
9562 case LE_EXPR:
9563 if (! unsignedp && integer_all_onesp (arg1))
9564 arg1 = integer_zero_node, code = LT;
9565 else
9566 code = unsignedp ? LEU : LE;
9567 break;
9568 case GT_EXPR:
9569 if (! unsignedp && integer_all_onesp (arg1))
9570 arg1 = integer_zero_node, code = GE;
9571 else
9572 code = unsignedp ? GTU : GT;
9573 break;
9574 case GE_EXPR:
9575 if (integer_onep (arg1))
9576 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9577 else
9578 code = unsignedp ? GEU : GE;
9579 break;
9580 default:
9581 abort ();
9584 /* Put a constant second. */
9585 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9587 tem = arg0; arg0 = arg1; arg1 = tem;
9588 code = swap_condition (code);
9591 /* If this is an equality or inequality test of a single bit, we can
9592 do this by shifting the bit being tested to the low-order bit and
9593 masking the result with the constant 1. If the condition was EQ,
9594 we xor it with 1. This does not require an scc insn and is faster
9595 than an scc insn even if we have it. */
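/* For instance, (x & 4) != 0 is computed as (x >> 2) & 1, and (x & 4) == 0
   as ((x >> 2) & 1) ^ 1.  When the bit tested is the sign bit, the final AND
   can be omitted, since the (logical) shift already leaves only that bit.  */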
9597 if ((code == NE || code == EQ)
9598 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9599 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9601 tree inner = TREE_OPERAND (arg0, 0);
9602 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9603 int ops_unsignedp;
9605 /* If INNER is a right shift of a constant and it plus BITNUM does
9606 not overflow, adjust BITNUM and INNER. */
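/* For example, ((x >> 3) & 2) != 0 tests bit 1 of x >> 3, i.e. bit 4 of x,
   so BITNUM becomes 4 and INNER becomes x.  */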
9608 if (TREE_CODE (inner) == RSHIFT_EXPR
9609 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9610 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9611 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9612 < TYPE_PRECISION (type)))
9614 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9615 inner = TREE_OPERAND (inner, 0);
9618 /* If we are going to be able to omit the AND below, we must do our
9619 operations as unsigned. If we must use the AND, we have a choice.
9620 Normally unsigned is faster, but for some machines signed is. */
9621 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9622 #ifdef LOAD_EXTEND_OP
9623 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9624 #else
9625 : 1
9626 #endif
9627 );
9629 if (subtarget == 0 || GET_CODE (subtarget) != REG
9630 || GET_MODE (subtarget) != operand_mode
9631 || ! safe_from_p (subtarget, inner, 1))
9632 subtarget = 0;
9634 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9636 if (bitnum != 0)
9637 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9638 size_int (bitnum), subtarget, ops_unsignedp);
9640 if (GET_MODE (op0) != mode)
9641 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9643 if ((code == EQ && ! invert) || (code == NE && invert))
9644 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9645 ops_unsignedp, OPTAB_LIB_WIDEN);
9647 /* Put the AND last so it can combine with more things. */
9648 if (bitnum != TYPE_PRECISION (type) - 1)
9649 op0 = expand_and (op0, const1_rtx, subtarget);
9651 return op0;
9654 /* Now see if we are likely to be able to do this. Return if not. */
9655 if (! can_compare_p (operand_mode, ccp_store_flag))
9656 return 0;
9657 icode = setcc_gen_code[(int) code];
9658 if (icode == CODE_FOR_nothing
9659 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9661 /* We can only do this if it is one of the special cases that
9662 can be handled without an scc insn. */
9663 if ((code == LT && integer_zerop (arg1))
9664 || (! only_cheap && code == GE && integer_zerop (arg1)))
9665 ;
9666 else if (BRANCH_COST >= 0
9667 && ! only_cheap && (code == NE || code == EQ)
9668 && TREE_CODE (type) != REAL_TYPE
9669 && ((abs_optab->handlers[(int) operand_mode].insn_code
9670 != CODE_FOR_nothing)
9671 || (ffs_optab->handlers[(int) operand_mode].insn_code
9672 != CODE_FOR_nothing)))
9673 ;
9674 else
9675 return 0;
9678 preexpand_calls (exp);
9679 if (subtarget == 0 || GET_CODE (subtarget) != REG
9680 || GET_MODE (subtarget) != operand_mode
9681 || ! safe_from_p (subtarget, arg1, 1))
9682 subtarget = 0;
9684 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9685 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9687 if (target == 0)
9688 target = gen_reg_rtx (mode);
9690 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9691 because, if the emit_store_flag does anything it will succeed and
9692 OP0 and OP1 will not be used subsequently. */
9694 result = emit_store_flag (target, code,
9695 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9696 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9697 operand_mode, unsignedp, 1);
9699 if (result)
9701 if (invert)
9702 result = expand_binop (mode, xor_optab, result, const1_rtx,
9703 result, 0, OPTAB_LIB_WIDEN);
9704 return result;
9707 /* If this failed, we have to do this with set/compare/jump/set code. */
9708 if (GET_CODE (target) != REG
9709 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9710 target = gen_reg_rtx (GET_MODE (target));
9712 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9713 result = compare_from_rtx (op0, op1, code, unsignedp,
9714 operand_mode, NULL_RTX, 0);
9715 if (GET_CODE (result) == CONST_INT)
9716 return (((result == const0_rtx && ! invert)
9717 || (result != const0_rtx && invert))
9718 ? const0_rtx : const1_rtx);
9720 label = gen_label_rtx ();
9721 if (bcc_gen_fctn[(int) code] == 0)
9722 abort ();
9724 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9725 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9726 emit_label (label);
9728 return target;
9731 /* Generate a tablejump instruction (used for switch statements). */
9733 #ifdef HAVE_tablejump
9735 /* INDEX is the value being switched on, with the lowest value
9736 in the table already subtracted.
9737 MODE is its expected mode (needed if INDEX is constant).
9738 RANGE is the length of the jump table.
9739 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9741 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9742 index value is out of range. */
9744 void
9745 do_tablejump (index, mode, range, table_label, default_label)
9746 rtx index, range, table_label, default_label;
9747 enum machine_mode mode;
9749 register rtx temp, vector;
9751 /* Do an unsigned comparison (in the proper mode) between the index
9752 expression and the value which represents the length of the range.
9753 Since we just finished subtracting the lower bound of the range
9754 from the index expression, this comparison allows us to simultaneously
9755 check that the original index expression value is both greater than
9756 or equal to the minimum value of the range and less than or equal to
9757 the maximum value of the range. */
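/* For instance, with case values 5 through 12, RANGE is 7 and INDEX already
   has 5 subtracted from it: an original value of 3 becomes (unsigned) -2,
   which compares greater than 7 just as any value above 12 does, so both
   out-of-range directions reach DEFAULT_LABEL with a single GTU test.  */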
9759 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9760 0, default_label);
9762 /* If index is in range, it must fit in Pmode.
9763 Convert to Pmode so we can index with it. */
9764 if (mode != Pmode)
9765 index = convert_to_mode (Pmode, index, 1);
9767 /* Don't let a MEM slip thru, because then INDEX that comes
9768 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9769 and break_out_memory_refs will go to work on it and mess it up. */
9770 #ifdef PIC_CASE_VECTOR_ADDRESS
9771 if (flag_pic && GET_CODE (index) != REG)
9772 index = copy_to_mode_reg (Pmode, index);
9773 #endif
9775 /* If flag_force_addr were to affect this address
9776 it could interfere with the tricky assumptions made
9777 about addresses that contain label-refs,
9778 which may be valid only very near the tablejump itself. */
9779 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9780 GET_MODE_SIZE, because this indicates how large insns are. The other
9781 uses should all be Pmode, because they are addresses. This code
9782 could fail if addresses and insns are not the same size. */
9783 index = gen_rtx_PLUS (Pmode,
9784 gen_rtx_MULT (Pmode, index,
9785 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9786 gen_rtx_LABEL_REF (Pmode, table_label));
9787 #ifdef PIC_CASE_VECTOR_ADDRESS
9788 if (flag_pic)
9789 index = PIC_CASE_VECTOR_ADDRESS (index);
9790 else
9791 #endif
9792 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9793 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9794 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9795 RTX_UNCHANGING_P (vector) = 1;
9796 convert_move (temp, vector, 0);
9798 emit_jump_insn (gen_tablejump (temp, table_label));
9800 /* If we are generating PIC code or if the table is PC-relative, the
9801 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9802 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9803 emit_barrier ();
9806 #endif /* HAVE_tablejump */