1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
50 #endif
52 /* Supply a default definition for PUSH_ARGS. */
53 #ifndef PUSH_ARGS
54 #ifdef PUSH_ROUNDING
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
56 #else
57 #define PUSH_ARGS 0
58 #endif
59 #endif
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
67 #ifdef PUSH_ROUNDING
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
73 #endif
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
86 #endif
88 /* Hook called by safe_from_p for language-specific tree codes. It is
89 up to the language front-end to install a hook if it has any such
90   codes that safe_from_p needs to know about.  Since safe_from_p will
91 recursively explore the TREE_OPERANDs of an expression, this hook
92 should not reexamine those pieces. This routine may recursively
93 call safe_from_p; it should always pass `0' as the TOP_P
94 parameter. */
95 int (*lang_safe_from_p) PARAMS ((rtx, tree));
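/* A minimal sketch of how a front end might install this hook; the
   EXAMPLE_* names, the FOO_EXPR tree code and the FOO_EXPR_EXTRA accessor
   are hypothetical, not part of this file.  The hook examines only the
   language-specific piece that safe_from_p does not already walk, and it
   passes 0 for TOP_P on the recursive call, as required above.  */
#if 0
static int
example_lang_safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  if (TREE_CODE (exp) == FOO_EXPR)
    return safe_from_p (x, FOO_EXPR_EXTRA (exp), 0);
  return 1;
}

void
example_lang_init ()
{
  lang_safe_from_p = example_lang_safe_from_p;
}
#endif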
97 /* If this is nonzero, we do not bother generating VOLATILE
98 around volatile memory references, and we are willing to
99 output indirect addresses. If cse is to follow, we reject
100 indirect addresses so a useful potential cse is generated;
101 if it is used only once, instruction combination will produce
102 the same indirect address eventually. */
103 int cse_not_expected;
105 /* Don't check memory usage, since code is being emitted to check memory
106    usage.  Used when current_function_check_memory_usage is true, to avoid
107 infinite recursion. */
108 static int in_check_memory_usage;
110 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
111 static tree placeholder_list = 0;
113 /* This structure is used by move_by_pieces to describe the move to
114 be performed. */
115 struct move_by_pieces
117 rtx to;
118 rtx to_addr;
119 int autinc_to;
120 int explicit_inc_to;
121 rtx from;
122 rtx from_addr;
123 int autinc_from;
124 int explicit_inc_from;
125 unsigned HOST_WIDE_INT len;
126 HOST_WIDE_INT offset;
127 int reverse;
130 /* This structure is used by clear_by_pieces to describe the clear to
131 be performed. */
133 struct clear_by_pieces
135 rtx to;
136 rtx to_addr;
137 int autinc_to;
138 int explicit_inc_to;
139 unsigned HOST_WIDE_INT len;
140 HOST_WIDE_INT offset;
141 int reverse;
144 extern struct obstack permanent_obstack;
146 static rtx get_push_address PARAMS ((int));
148 static rtx enqueue_insn PARAMS ((rtx, rtx));
149 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
150 PARAMS ((unsigned HOST_WIDE_INT,
151 unsigned int));
152 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
153 struct move_by_pieces *));
154 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
155 unsigned int));
156 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
157 enum machine_mode,
158 struct clear_by_pieces *));
159 static rtx get_subtarget PARAMS ((rtx));
160 static int is_zeros_p PARAMS ((tree));
161 static int mostly_zeros_p PARAMS ((tree));
162 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
163 HOST_WIDE_INT, enum machine_mode,
164 tree, tree, unsigned int, int,
165 int));
166 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
167 HOST_WIDE_INT));
168 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
169 HOST_WIDE_INT, enum machine_mode,
170 tree, enum machine_mode, int,
171 unsigned int, HOST_WIDE_INT, int));
172 static enum memory_use_mode
173 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
174 static tree save_noncopied_parts PARAMS ((tree, tree));
175 static tree init_noncopied_parts PARAMS ((tree, tree));
176 static int fixed_type_p PARAMS ((tree));
177 static rtx var_rtx PARAMS ((tree));
178 static int readonly_fields_p PARAMS ((tree));
179 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
180 static rtx expand_increment PARAMS ((tree, int, int));
181 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
182 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
183 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
184 rtx, rtx));
185 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
187 /* Record for each mode whether we can move a register directly to or
188 from an object of that mode in memory. If we can't, we won't try
189 to use that mode directly when accessing a field of that mode. */
191 static char direct_load[NUM_MACHINE_MODES];
192 static char direct_store[NUM_MACHINE_MODES];
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
197 #ifndef MOVE_RATIO
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
199 #define MOVE_RATIO 2
200 #else
201 /* If we are optimizing for space (-Os), cut down the default move ratio. */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 #endif
204 #endif
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
211 #endif
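/* As a worked example (numbers illustrative only): with the default
   MOVE_RATIO of 15 when not optimizing for size, a 16-byte copy of
   word-aligned operands on a 32-bit target needs four SImode moves, so
   MOVE_BY_PIECES_P (16, 32) is true and the copy is expanded inline;
   a 256-byte copy would need 64 such moves, so emit_block_move falls
   back to a movstr pattern or a memcpy call instead.  The actual counts
   come from move_by_pieces_ninsns below.  */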
213 /* This array records the insn_code of insns to perform block moves. */
214 enum insn_code movstr_optab[NUM_MACHINE_MODES];
216 /* This array records the insn_code of insns to perform block clears. */
217 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
219 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
221 #ifndef SLOW_UNALIGNED_ACCESS
222 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
223 #endif
225 /* This is run once per compilation to set up which modes can be used
226 directly in memory and to initialize the block move optab. */
228 void
229 init_expr_once ()
231 rtx insn, pat;
232 enum machine_mode mode;
233 int num_clobbers;
234 rtx mem, mem1;
236 start_sequence ();
238 /* Try indexing by frame ptr and try by stack ptr.
239 It is known that on the Convex the stack ptr isn't a valid index.
240 With luck, one or the other is valid on any machine. */
241 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
242 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
244 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
245 pat = PATTERN (insn);
247 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
248 mode = (enum machine_mode) ((int) mode + 1))
250 int regno;
251 rtx reg;
253 direct_load[(int) mode] = direct_store[(int) mode] = 0;
254 PUT_MODE (mem, mode);
255 PUT_MODE (mem1, mode);
257 /* See if there is some register that can be used in this mode and
258 directly loaded or stored from memory. */
260 if (mode != VOIDmode && mode != BLKmode)
261 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
262 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
263 regno++)
265 if (! HARD_REGNO_MODE_OK (regno, mode))
266 continue;
268 reg = gen_rtx_REG (mode, regno);
270 SET_SRC (pat) = mem;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
275 SET_SRC (pat) = mem1;
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
280 SET_SRC (pat) = reg;
281 SET_DEST (pat) = mem;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
285 SET_SRC (pat) = reg;
286 SET_DEST (pat) = mem1;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
292 end_sequence ();
295 /* This is run at the start of compiling a function. */
297 void
298 init_expr ()
300 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
302 pending_chain = 0;
303 pending_stack_adjust = 0;
304 stack_pointer_delta = 0;
305 inhibit_defer_pop = 0;
306 saveregs_value = 0;
307 apply_args_value = 0;
308 forced_labels = 0;
311 void
312 mark_expr_status (p)
313 struct expr_status *p;
315 if (p == NULL)
316 return;
318 ggc_mark_rtx (p->x_saveregs_value);
319 ggc_mark_rtx (p->x_apply_args_value);
320 ggc_mark_rtx (p->x_forced_labels);
323 void
324 free_expr_status (f)
325 struct function *f;
327 free (f->expr);
328 f->expr = NULL;
331 /* Small sanity check that the queue is empty at the end of a function. */
333 void
334 finish_expr_for_function ()
336 if (pending_chain)
337 abort ();
340 /* Manage the queue of increment instructions to be output
341 for POSTINCREMENT_EXPR expressions, etc. */
343 /* Queue up to increment (or change) VAR later. BODY says how:
344 BODY should be the same thing you would pass to emit_insn
345 to increment right away. It will go to emit_insn later on.
347 The value is a QUEUED expression to be used in place of VAR
348 where you want to guarantee the pre-incrementation value of VAR. */
350 static rtx
351 enqueue_insn (var, body)
352 rtx var, body;
354 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
355 body, pending_chain);
356 return pending_chain;
359 /* Use protect_from_queue to convert a QUEUED expression
360 into something that you can put immediately into an instruction.
361 If the queued incrementation has not happened yet,
362 protect_from_queue returns the variable itself.
363 If the incrementation has happened, protect_from_queue returns a temp
364 that contains a copy of the old value of the variable.
366 Any time an rtx which might possibly be a QUEUED is to be put
367 into an instruction, it must be passed through protect_from_queue first.
368 QUEUED expressions are not meaningful in instructions.
370 Do not pass a value through protect_from_queue and then hold
371 on to it for a while before putting it in an instruction!
372 If the queue is flushed in between, incorrect code will result. */
375 protect_from_queue (x, modify)
376 register rtx x;
377 int modify;
379 register RTX_CODE code = GET_CODE (x);
381 #if 0 /* A QUEUED can hang around after the queue is forced out. */
382 /* Shortcut for most common case. */
383 if (pending_chain == 0)
384 return x;
385 #endif
387 if (code != QUEUED)
389 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
390 use of autoincrement. Make a copy of the contents of the memory
391 location rather than a copy of the address, but not if the value is
392 of mode BLKmode. Don't modify X in place since it might be
393 shared. */
394 if (code == MEM && GET_MODE (x) != BLKmode
395 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
397 register rtx y = XEXP (x, 0);
398 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
400 MEM_COPY_ATTRIBUTES (new, x);
402 if (QUEUED_INSN (y))
404 register rtx temp = gen_reg_rtx (GET_MODE (new));
405 emit_insn_before (gen_move_insn (temp, new),
406 QUEUED_INSN (y));
407 return temp;
409 return new;
411 /* Otherwise, recursively protect the subexpressions of all
412 the kinds of rtx's that can contain a QUEUED. */
413 if (code == MEM)
415 rtx tem = protect_from_queue (XEXP (x, 0), 0);
416 if (tem != XEXP (x, 0))
418 x = copy_rtx (x);
419 XEXP (x, 0) = tem;
422 else if (code == PLUS || code == MULT)
424 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
425 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
426 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
428 x = copy_rtx (x);
429 XEXP (x, 0) = new0;
430 XEXP (x, 1) = new1;
433 return x;
435 /* If the increment has not happened, use the variable itself. */
436 if (QUEUED_INSN (x) == 0)
437 return QUEUED_VAR (x);
438 /* If the increment has happened and a pre-increment copy exists,
439 use that copy. */
440 if (QUEUED_COPY (x) != 0)
441 return QUEUED_COPY (x);
442 /* The increment has happened but we haven't set up a pre-increment copy.
443 Set one up now, and use it. */
444 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
445 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
446 QUEUED_INSN (x));
447 return QUEUED_COPY (x);
450 /* Return nonzero if X contains a QUEUED expression:
451 if it contains anything that will be altered by a queued increment.
452 We handle only combinations of MEM, PLUS, MINUS and MULT operators
453 since memory addresses generally contain only those. */
456 queued_subexp_p (x)
457 rtx x;
459 register enum rtx_code code = GET_CODE (x);
460 switch (code)
462 case QUEUED:
463 return 1;
464 case MEM:
465 return queued_subexp_p (XEXP (x, 0));
466 case MULT:
467 case PLUS:
468 case MINUS:
469 return (queued_subexp_p (XEXP (x, 0))
470 || queued_subexp_p (XEXP (x, 1)));
471 default:
472 return 0;
476 /* Perform all the pending incrementations. */
478 void
479 emit_queue ()
481 register rtx p;
482 while ((p = pending_chain))
484 rtx body = QUEUED_BODY (p);
486 if (GET_CODE (body) == SEQUENCE)
488 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
489 emit_insn (QUEUED_BODY (p));
491 else
492 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
493 pending_chain = QUEUED_NEXT (p);
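/* A minimal sketch of the intended protocol for a post-increment,
   assuming VAR is the rtx for the incremented variable and TARGET is
   where its pre-increment value is wanted; the variable names are
   illustrative only.  */
#if 0
  rtx var, target, queued, value;

  /* Queue "var = var + 1" instead of emitting it immediately.  */
  queued = enqueue_insn (var, gen_add2_insn (var, GEN_INT (1)));

  /* Any use of the variable must go through protect_from_queue, which
     yields either VAR itself or a copy of its old value.  */
  value = protect_from_queue (queued, 0);
  emit_move_insn (target, value);

  /* At the next sequence point, flush the queued increment.  */
  emit_queue ();
#endif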
497 /* Copy data from FROM to TO, where the machine modes are not the same.
498 Both modes may be integer, or both may be floating.
499 UNSIGNEDP should be nonzero if FROM is an unsigned type.
500 This causes zero-extension instead of sign-extension. */
502 void
503 convert_move (to, from, unsignedp)
504 register rtx to, from;
505 int unsignedp;
507 enum machine_mode to_mode = GET_MODE (to);
508 enum machine_mode from_mode = GET_MODE (from);
509 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
510 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
511 enum insn_code code;
512 rtx libcall;
514 /* rtx code for making an equivalent value. */
515 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
517 to = protect_from_queue (to, 1);
518 from = protect_from_queue (from, 0);
520 if (to_real != from_real)
521 abort ();
523 /* If FROM is a SUBREG that indicates that we have already done at least
524 the required extension, strip it. We don't handle such SUBREGs as
525 TO here. */
527 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
528 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
529 >= GET_MODE_SIZE (to_mode))
530 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
531 from = gen_lowpart (to_mode, from), from_mode = to_mode;
533 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
534 abort ();
536 if (to_mode == from_mode
537 || (from_mode == VOIDmode && CONSTANT_P (from)))
539 emit_move_insn (to, from);
540 return;
543 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
545 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
546 abort ();
548 if (VECTOR_MODE_P (to_mode))
549 from = gen_rtx_SUBREG (to_mode, from, 0);
550 else
551 to = gen_rtx_SUBREG (from_mode, to, 0);
553 emit_move_insn (to, from);
554 return;
557 if (to_real != from_real)
558 abort ();
560 if (to_real)
562 rtx value, insns;
564 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
566 /* Try converting directly if the insn is supported. */
567 if ((code = can_extend_p (to_mode, from_mode, 0))
568 != CODE_FOR_nothing)
570 emit_unop_insn (code, to, from, UNKNOWN);
571 return;
575 #ifdef HAVE_trunchfqf2
576 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
578 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
579 return;
581 #endif
582 #ifdef HAVE_trunctqfqf2
583 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
585 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
586 return;
588 #endif
589 #ifdef HAVE_truncsfqf2
590 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
592 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
593 return;
595 #endif
596 #ifdef HAVE_truncdfqf2
597 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
599 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
600 return;
602 #endif
603 #ifdef HAVE_truncxfqf2
604 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
606 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
607 return;
609 #endif
610 #ifdef HAVE_trunctfqf2
611 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
613 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
614 return;
616 #endif
618 #ifdef HAVE_trunctqfhf2
619 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
621 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
622 return;
624 #endif
625 #ifdef HAVE_truncsfhf2
626 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
628 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
629 return;
631 #endif
632 #ifdef HAVE_truncdfhf2
633 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
635 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
636 return;
638 #endif
639 #ifdef HAVE_truncxfhf2
640 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
642 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
643 return;
645 #endif
646 #ifdef HAVE_trunctfhf2
647 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
649 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
650 return;
652 #endif
654 #ifdef HAVE_truncsftqf2
655 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
657 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
658 return;
660 #endif
661 #ifdef HAVE_truncdftqf2
662 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
664 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
665 return;
667 #endif
668 #ifdef HAVE_truncxftqf2
669 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
671 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
672 return;
674 #endif
675 #ifdef HAVE_trunctftqf2
676 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
678 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
679 return;
681 #endif
683 #ifdef HAVE_truncdfsf2
684 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
686 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
687 return;
689 #endif
690 #ifdef HAVE_truncxfsf2
691 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
693 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
694 return;
696 #endif
697 #ifdef HAVE_trunctfsf2
698 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
700 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
701 return;
703 #endif
704 #ifdef HAVE_truncxfdf2
705 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
707 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
708 return;
710 #endif
711 #ifdef HAVE_trunctfdf2
712 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
714 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
715 return;
717 #endif
719 libcall = (rtx) 0;
720 switch (from_mode)
722 case SFmode:
723 switch (to_mode)
725 case DFmode:
726 libcall = extendsfdf2_libfunc;
727 break;
729 case XFmode:
730 libcall = extendsfxf2_libfunc;
731 break;
733 case TFmode:
734 libcall = extendsftf2_libfunc;
735 break;
737 default:
738 break;
740 break;
742 case DFmode:
743 switch (to_mode)
745 case SFmode:
746 libcall = truncdfsf2_libfunc;
747 break;
749 case XFmode:
750 libcall = extenddfxf2_libfunc;
751 break;
753 case TFmode:
754 libcall = extenddftf2_libfunc;
755 break;
757 default:
758 break;
760 break;
762 case XFmode:
763 switch (to_mode)
765 case SFmode:
766 libcall = truncxfsf2_libfunc;
767 break;
769 case DFmode:
770 libcall = truncxfdf2_libfunc;
771 break;
773 default:
774 break;
776 break;
778 case TFmode:
779 switch (to_mode)
781 case SFmode:
782 libcall = trunctfsf2_libfunc;
783 break;
785 case DFmode:
786 libcall = trunctfdf2_libfunc;
787 break;
789 default:
790 break;
792 break;
794 default:
795 break;
798 if (libcall == (rtx) 0)
799 /* This conversion is not implemented yet. */
800 abort ();
802 start_sequence ();
803 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
804 1, from, from_mode);
805 insns = get_insns ();
806 end_sequence ();
807 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
808 from));
809 return;
812 /* Now both modes are integers. */
814 /* Handle expanding beyond a word. */
815 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
816 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
818 rtx insns;
819 rtx lowpart;
820 rtx fill_value;
821 rtx lowfrom;
822 int i;
823 enum machine_mode lowpart_mode;
824 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
826 /* Try converting directly if the insn is supported. */
827 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
828 != CODE_FOR_nothing)
830 /* If FROM is a SUBREG, put it into a register. Do this
831 so that we always generate the same set of insns for
832 better cse'ing; if an intermediate assignment occurred,
833 we won't be doing the operation directly on the SUBREG. */
834 if (optimize > 0 && GET_CODE (from) == SUBREG)
835 from = force_reg (from_mode, from);
836 emit_unop_insn (code, to, from, equiv_code);
837 return;
839 /* Next, try converting via full word. */
840 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
841 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
842 != CODE_FOR_nothing))
844 if (GET_CODE (to) == REG)
845 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
846 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
847 emit_unop_insn (code, to,
848 gen_lowpart (word_mode, to), equiv_code);
849 return;
852 /* No special multiword conversion insn; do it by hand. */
853 start_sequence ();
855 /* Since we will turn this into a no conflict block, we must ensure
856 that the source does not overlap the target. */
858 if (reg_overlap_mentioned_p (to, from))
859 from = force_reg (from_mode, from);
861 /* Get a copy of FROM widened to a word, if necessary. */
862 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
863 lowpart_mode = word_mode;
864 else
865 lowpart_mode = from_mode;
867 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
869 lowpart = gen_lowpart (lowpart_mode, to);
870 emit_move_insn (lowpart, lowfrom);
872 /* Compute the value to put in each remaining word. */
873 if (unsignedp)
874 fill_value = const0_rtx;
875 else
877 #ifdef HAVE_slt
878 if (HAVE_slt
879 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
880 && STORE_FLAG_VALUE == -1)
882 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
883 lowpart_mode, 0, 0);
884 fill_value = gen_reg_rtx (word_mode);
885 emit_insn (gen_slt (fill_value));
887 else
888 #endif
890 fill_value
891 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
892 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
893 NULL_RTX, 0);
894 fill_value = convert_to_mode (word_mode, fill_value, 1);
898 /* Fill the remaining words. */
899 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
901 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
902 rtx subword = operand_subword (to, index, 1, to_mode);
904 if (subword == 0)
905 abort ();
907 if (fill_value != subword)
908 emit_move_insn (subword, fill_value);
911 insns = get_insns ();
912 end_sequence ();
914 emit_no_conflict_block (insns, to, from, NULL_RTX,
915 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
916 return;
919 /* Truncating multi-word to a word or less. */
920 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
921 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
923 if (!((GET_CODE (from) == MEM
924 && ! MEM_VOLATILE_P (from)
925 && direct_load[(int) to_mode]
926 && ! mode_dependent_address_p (XEXP (from, 0)))
927 || GET_CODE (from) == REG
928 || GET_CODE (from) == SUBREG))
929 from = force_reg (from_mode, from);
930 convert_move (to, gen_lowpart (word_mode, from), 0);
931 return;
934 /* Handle pointer conversion. */ /* SPEE 900220. */
935 if (to_mode == PQImode)
937 if (from_mode != QImode)
938 from = convert_to_mode (QImode, from, unsignedp);
940 #ifdef HAVE_truncqipqi2
941 if (HAVE_truncqipqi2)
943 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
944 return;
946 #endif /* HAVE_truncqipqi2 */
947 abort ();
950 if (from_mode == PQImode)
952 if (to_mode != QImode)
954 from = convert_to_mode (QImode, from, unsignedp);
955 from_mode = QImode;
957 else
959 #ifdef HAVE_extendpqiqi2
960 if (HAVE_extendpqiqi2)
962 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
963 return;
965 #endif /* HAVE_extendpqiqi2 */
966 abort ();
970 if (to_mode == PSImode)
972 if (from_mode != SImode)
973 from = convert_to_mode (SImode, from, unsignedp);
975 #ifdef HAVE_truncsipsi2
976 if (HAVE_truncsipsi2)
978 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
979 return;
981 #endif /* HAVE_truncsipsi2 */
982 abort ();
985 if (from_mode == PSImode)
987 if (to_mode != SImode)
989 from = convert_to_mode (SImode, from, unsignedp);
990 from_mode = SImode;
992 else
994 #ifdef HAVE_extendpsisi2
995 if (! unsignedp && HAVE_extendpsisi2)
997 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
998 return;
1000 #endif /* HAVE_extendpsisi2 */
1001 #ifdef HAVE_zero_extendpsisi2
1002 if (unsignedp && HAVE_zero_extendpsisi2)
1004 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1005 return;
1007 #endif /* HAVE_zero_extendpsisi2 */
1008 abort ();
1012 if (to_mode == PDImode)
1014 if (from_mode != DImode)
1015 from = convert_to_mode (DImode, from, unsignedp);
1017 #ifdef HAVE_truncdipdi2
1018 if (HAVE_truncdipdi2)
1020 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1021 return;
1023 #endif /* HAVE_truncdipdi2 */
1024 abort ();
1027 if (from_mode == PDImode)
1029 if (to_mode != DImode)
1031 from = convert_to_mode (DImode, from, unsignedp);
1032 from_mode = DImode;
1034 else
1036 #ifdef HAVE_extendpdidi2
1037 if (HAVE_extendpdidi2)
1039 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1040 return;
1042 #endif /* HAVE_extendpdidi2 */
1043 abort ();
1047 /* Now follow all the conversions between integers
1048 no more than a word long. */
1050 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1051 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1052 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1053 GET_MODE_BITSIZE (from_mode)))
1055 if (!((GET_CODE (from) == MEM
1056 && ! MEM_VOLATILE_P (from)
1057 && direct_load[(int) to_mode]
1058 && ! mode_dependent_address_p (XEXP (from, 0)))
1059 || GET_CODE (from) == REG
1060 || GET_CODE (from) == SUBREG))
1061 from = force_reg (from_mode, from);
1062 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1063 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1064 from = copy_to_reg (from);
1065 emit_move_insn (to, gen_lowpart (to_mode, from));
1066 return;
1069 /* Handle extension. */
1070 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1072 /* Convert directly if that works. */
1073 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1074 != CODE_FOR_nothing)
1076 emit_unop_insn (code, to, from, equiv_code);
1077 return;
1079 else
1081 enum machine_mode intermediate;
1082 rtx tmp;
1083 tree shift_amount;
1085 /* Search for a mode to convert via. */
1086 for (intermediate = from_mode; intermediate != VOIDmode;
1087 intermediate = GET_MODE_WIDER_MODE (intermediate))
1088 if (((can_extend_p (to_mode, intermediate, unsignedp)
1089 != CODE_FOR_nothing)
1090 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1091 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1092 GET_MODE_BITSIZE (intermediate))))
1093 && (can_extend_p (intermediate, from_mode, unsignedp)
1094 != CODE_FOR_nothing))
1096 convert_move (to, convert_to_mode (intermediate, from,
1097 unsignedp), unsignedp);
1098 return;
1101 /* No suitable intermediate mode.
1102 Generate what we need with shifts. */
1103 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1104 - GET_MODE_BITSIZE (from_mode), 0);
1105 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1106 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1107 to, unsignedp);
1108 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1109 to, unsignedp);
1110 if (tmp != to)
1111 emit_move_insn (to, tmp);
1112 return;
1116 /* Support special truncate insns for certain modes. */
1118 if (from_mode == DImode && to_mode == SImode)
1120 #ifdef HAVE_truncdisi2
1121 if (HAVE_truncdisi2)
1123 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1124 return;
1126 #endif
1127 convert_move (to, force_reg (from_mode, from), unsignedp);
1128 return;
1131 if (from_mode == DImode && to_mode == HImode)
1133 #ifdef HAVE_truncdihi2
1134 if (HAVE_truncdihi2)
1136 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1137 return;
1139 #endif
1140 convert_move (to, force_reg (from_mode, from), unsignedp);
1141 return;
1144 if (from_mode == DImode && to_mode == QImode)
1146 #ifdef HAVE_truncdiqi2
1147 if (HAVE_truncdiqi2)
1149 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1150 return;
1152 #endif
1153 convert_move (to, force_reg (from_mode, from), unsignedp);
1154 return;
1157 if (from_mode == SImode && to_mode == HImode)
1159 #ifdef HAVE_truncsihi2
1160 if (HAVE_truncsihi2)
1162 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1163 return;
1165 #endif
1166 convert_move (to, force_reg (from_mode, from), unsignedp);
1167 return;
1170 if (from_mode == SImode && to_mode == QImode)
1172 #ifdef HAVE_truncsiqi2
1173 if (HAVE_truncsiqi2)
1175 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1176 return;
1178 #endif
1179 convert_move (to, force_reg (from_mode, from), unsignedp);
1180 return;
1183 if (from_mode == HImode && to_mode == QImode)
1185 #ifdef HAVE_trunchiqi2
1186 if (HAVE_trunchiqi2)
1188 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1189 return;
1191 #endif
1192 convert_move (to, force_reg (from_mode, from), unsignedp);
1193 return;
1196 if (from_mode == TImode && to_mode == DImode)
1198 #ifdef HAVE_trunctidi2
1199 if (HAVE_trunctidi2)
1201 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1202 return;
1204 #endif
1205 convert_move (to, force_reg (from_mode, from), unsignedp);
1206 return;
1209 if (from_mode == TImode && to_mode == SImode)
1211 #ifdef HAVE_trunctisi2
1212 if (HAVE_trunctisi2)
1214 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1215 return;
1217 #endif
1218 convert_move (to, force_reg (from_mode, from), unsignedp);
1219 return;
1222 if (from_mode == TImode && to_mode == HImode)
1224 #ifdef HAVE_trunctihi2
1225 if (HAVE_trunctihi2)
1227 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1228 return;
1230 #endif
1231 convert_move (to, force_reg (from_mode, from), unsignedp);
1232 return;
1235 if (from_mode == TImode && to_mode == QImode)
1237 #ifdef HAVE_trunctiqi2
1238 if (HAVE_trunctiqi2)
1240 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1241 return;
1243 #endif
1244 convert_move (to, force_reg (from_mode, from), unsignedp);
1245 return;
1248 /* Handle truncation of volatile memrefs, and so on;
1249 the things that couldn't be truncated directly,
1250 and for which there was no special instruction. */
1251 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1253 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1254 emit_move_insn (to, temp);
1255 return;
1258 /* Mode combination is not recognized. */
1259 abort ();
1262 /* Return an rtx for a value that would result
1263 from converting X to mode MODE.
1264 Both X and MODE may be floating, or both integer.
1265 UNSIGNEDP is nonzero if X is an unsigned value.
1266 This can be done by referring to a part of X in place
1267 or by copying to a new temporary with conversion.
1269 This function *must not* call protect_from_queue
1270 except when putting X into an insn (in which case convert_move does it). */
1273 convert_to_mode (mode, x, unsignedp)
1274 enum machine_mode mode;
1275 rtx x;
1276 int unsignedp;
1278 return convert_modes (mode, VOIDmode, x, unsignedp);
1281 /* Return an rtx for a value that would result
1282 from converting X from mode OLDMODE to mode MODE.
1283 Both modes may be floating, or both integer.
1284 UNSIGNEDP is nonzero if X is an unsigned value.
1286 This can be done by referring to a part of X in place
1287 or by copying to a new temporary with conversion.
1289 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1291 This function *must not* call protect_from_queue
1292 except when putting X into an insn (in which case convert_move does it). */
1295 convert_modes (mode, oldmode, x, unsignedp)
1296 enum machine_mode mode, oldmode;
1297 rtx x;
1298 int unsignedp;
1300 register rtx temp;
1302 /* If FROM is a SUBREG that indicates that we have already done at least
1303 the required extension, strip it. */
1305 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1306 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1307 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1308 x = gen_lowpart (mode, x);
1310 if (GET_MODE (x) != VOIDmode)
1311 oldmode = GET_MODE (x);
1313 if (mode == oldmode)
1314 return x;
1316 /* There is one case that we must handle specially: If we are converting
1317 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1318 we are to interpret the constant as unsigned, gen_lowpart will do
1319    the wrong thing if the constant appears negative.  What we want to do is
1320 make the high-order word of the constant zero, not all ones. */
1322 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1323 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1324 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1326 HOST_WIDE_INT val = INTVAL (x);
1328 if (oldmode != VOIDmode
1329 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1331 int width = GET_MODE_BITSIZE (oldmode);
1333 /* We need to zero extend VAL. */
1334 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1337 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1340 /* We can do this with a gen_lowpart if both desired and current modes
1341 are integer, and this is either a constant integer, a register, or a
1342 non-volatile MEM. Except for the constant case where MODE is no
1343 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1345 if ((GET_CODE (x) == CONST_INT
1346 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1347 || (GET_MODE_CLASS (mode) == MODE_INT
1348 && GET_MODE_CLASS (oldmode) == MODE_INT
1349 && (GET_CODE (x) == CONST_DOUBLE
1350 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1351 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1352 && direct_load[(int) mode])
1353 || (GET_CODE (x) == REG
1354 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1355 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1357 /* ?? If we don't know OLDMODE, we have to assume here that
1358 X does not need sign- or zero-extension. This may not be
1359 the case, but it's the best we can do. */
1360 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1361 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1363 HOST_WIDE_INT val = INTVAL (x);
1364 int width = GET_MODE_BITSIZE (oldmode);
1366 /* We must sign or zero-extend in this case. Start by
1367 zero-extending, then sign extend if we need to. */
1368 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1369 if (! unsignedp
1370 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1371 val |= (HOST_WIDE_INT) (-1) << width;
1373 return GEN_INT (val);
1376 return gen_lowpart (mode, x);
1379 temp = gen_reg_rtx (mode);
1380 convert_move (temp, x, unsignedp);
1381 return temp;
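/* A minimal usage sketch, assuming SRC is an SImode pseudo whose value
   is wanted sign-extended to DImode; the variable names are
   illustrative only.  */
#if 0
  rtx src = gen_reg_rtx (SImode);
  rtx wide = convert_to_mode (DImode, src, /* unsignedp */ 0);

  /* Or, when the destination register already exists: */
  rtx dest = gen_reg_rtx (DImode);
  convert_move (dest, src, /* unsignedp */ 0);
#endif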
1384 /* This macro is used to determine the largest unit size that
1385    move_by_pieces can use.  */
1387 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1388 move efficiently, as opposed to MOVE_MAX which is the maximum
1389 number of bytes we can move with a single instruction. */
1391 #ifndef MOVE_MAX_PIECES
1392 #define MOVE_MAX_PIECES MOVE_MAX
1393 #endif
1395 /* Generate several move instructions to copy LEN bytes
1396 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1397 The caller must pass FROM and TO
1398 through protect_from_queue before calling.
1399 ALIGN is maximum alignment we can assume. */
1401 void
1402 move_by_pieces (to, from, len, align)
1403 rtx to, from;
1404 unsigned HOST_WIDE_INT len;
1405 unsigned int align;
1407 struct move_by_pieces data;
1408 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1409 unsigned int max_size = MOVE_MAX_PIECES + 1;
1410 enum machine_mode mode = VOIDmode, tmode;
1411 enum insn_code icode;
1413 data.offset = 0;
1414 data.to_addr = to_addr;
1415 data.from_addr = from_addr;
1416 data.to = to;
1417 data.from = from;
1418 data.autinc_to
1419 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1420 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1421 data.autinc_from
1422 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1423 || GET_CODE (from_addr) == POST_INC
1424 || GET_CODE (from_addr) == POST_DEC);
1426 data.explicit_inc_from = 0;
1427 data.explicit_inc_to = 0;
1428 data.reverse
1429 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1430 if (data.reverse) data.offset = len;
1431 data.len = len;
1433 /* If copying requires more than two move insns,
1434 copy addresses to registers (to make displacements shorter)
1435 and use post-increment if available. */
1436 if (!(data.autinc_from && data.autinc_to)
1437 && move_by_pieces_ninsns (len, align) > 2)
1439 /* Find the mode of the largest move... */
1440 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1441 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1442 if (GET_MODE_SIZE (tmode) < max_size)
1443 mode = tmode;
1445 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1447 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1448 data.autinc_from = 1;
1449 data.explicit_inc_from = -1;
1451 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1453 data.from_addr = copy_addr_to_reg (from_addr);
1454 data.autinc_from = 1;
1455 data.explicit_inc_from = 1;
1457 if (!data.autinc_from && CONSTANT_P (from_addr))
1458 data.from_addr = copy_addr_to_reg (from_addr);
1459 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1461 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1462 data.autinc_to = 1;
1463 data.explicit_inc_to = -1;
1465 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1467 data.to_addr = copy_addr_to_reg (to_addr);
1468 data.autinc_to = 1;
1469 data.explicit_inc_to = 1;
1471 if (!data.autinc_to && CONSTANT_P (to_addr))
1472 data.to_addr = copy_addr_to_reg (to_addr);
1475 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1476 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1477 align = MOVE_MAX * BITS_PER_UNIT;
1479 /* First move what we can in the largest integer mode, then go to
1480 successively smaller modes. */
1482 while (max_size > 1)
1484 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1485 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1486 if (GET_MODE_SIZE (tmode) < max_size)
1487 mode = tmode;
1489 if (mode == VOIDmode)
1490 break;
1492 icode = mov_optab->handlers[(int) mode].insn_code;
1493 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1494 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1496 max_size = GET_MODE_SIZE (mode);
1499 /* The code above should have handled everything. */
1500 if (data.len > 0)
1501 abort ();
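/* A minimal sketch of a direct call, assuming DST_ADDR and SRC_ADDR are
   valid Pmode addresses of 16-byte blocks known to be 32-bit aligned;
   callers normally reach this routine through emit_block_move, and the
   operands must already have been passed through protect_from_queue.  */
#if 0
  rtx to = gen_rtx_MEM (BLKmode, dst_addr);
  rtx from = gen_rtx_MEM (BLKmode, src_addr);

  move_by_pieces (to, from, 16, 32);
#endif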
1504 /* Return number of insns required to move L bytes by pieces.
1505    ALIGN (in bits) is the maximum alignment we can assume.  */
1507 static unsigned HOST_WIDE_INT
1508 move_by_pieces_ninsns (l, align)
1509 unsigned HOST_WIDE_INT l;
1510 unsigned int align;
1512 unsigned HOST_WIDE_INT n_insns = 0;
1513 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1515 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1516 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1517 align = MOVE_MAX * BITS_PER_UNIT;
1519 while (max_size > 1)
1521 enum machine_mode mode = VOIDmode, tmode;
1522 enum insn_code icode;
1524 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1525 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1526 if (GET_MODE_SIZE (tmode) < max_size)
1527 mode = tmode;
1529 if (mode == VOIDmode)
1530 break;
1532 icode = mov_optab->handlers[(int) mode].insn_code;
1533 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1534 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1536 max_size = GET_MODE_SIZE (mode);
1539 if (l)
1540 abort ();
1541 return n_insns;
1544 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1545 with move instructions for mode MODE. GENFUN is the gen_... function
1546 to make a move insn for that mode. DATA has all the other info. */
1548 static void
1549 move_by_pieces_1 (genfun, mode, data)
1550 rtx (*genfun) PARAMS ((rtx, ...));
1551 enum machine_mode mode;
1552 struct move_by_pieces *data;
1554 unsigned int size = GET_MODE_SIZE (mode);
1555 rtx to1, from1;
1557 while (data->len >= size)
1559 if (data->reverse)
1560 data->offset -= size;
1562 if (data->autinc_to)
1564 to1 = gen_rtx_MEM (mode, data->to_addr);
1565 MEM_COPY_ATTRIBUTES (to1, data->to);
1567 else
1568 to1 = change_address (data->to, mode,
1569 plus_constant (data->to_addr, data->offset));
1571 if (data->autinc_from)
1573 from1 = gen_rtx_MEM (mode, data->from_addr);
1574 MEM_COPY_ATTRIBUTES (from1, data->from);
1576 else
1577 from1 = change_address (data->from, mode,
1578 plus_constant (data->from_addr, data->offset));
1580 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1581 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1582 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1583 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1585 emit_insn ((*genfun) (to1, from1));
1587 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1588 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1589 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1590 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1592 if (! data->reverse)
1593 data->offset += size;
1595 data->len -= size;
1599 /* Emit code to move a block Y to a block X.
1600 This may be done with string-move instructions,
1601 with multiple scalar move instructions, or with a library call.
1603 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1604 with mode BLKmode.
1605 SIZE is an rtx that says how long they are.
1606 ALIGN is the maximum alignment we can assume they have.
1608 Return the address of the new block, if memcpy is called and returns it,
1609 0 otherwise. */
1612 emit_block_move (x, y, size, align)
1613 rtx x, y;
1614 rtx size;
1615 unsigned int align;
1617 rtx retval = 0;
1618 #ifdef TARGET_MEM_FUNCTIONS
1619 static tree fn;
1620 tree call_expr, arg_list;
1621 #endif
1623 if (GET_MODE (x) != BLKmode)
1624 abort ();
1626 if (GET_MODE (y) != BLKmode)
1627 abort ();
1629 x = protect_from_queue (x, 1);
1630 y = protect_from_queue (y, 0);
1631 size = protect_from_queue (size, 0);
1633 if (GET_CODE (x) != MEM)
1634 abort ();
1635 if (GET_CODE (y) != MEM)
1636 abort ();
1637 if (size == 0)
1638 abort ();
1640 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1641 move_by_pieces (x, y, INTVAL (size), align);
1642 else
1644 /* Try the most limited insn first, because there's no point
1645 including more than one in the machine description unless
1646 the more limited one has some advantage. */
1648 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1649 enum machine_mode mode;
1651 /* Since this is a move insn, we don't care about volatility. */
1652 volatile_ok = 1;
1654 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1655 mode = GET_MODE_WIDER_MODE (mode))
1657 enum insn_code code = movstr_optab[(int) mode];
1658 insn_operand_predicate_fn pred;
1660 if (code != CODE_FOR_nothing
1661 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1662 here because if SIZE is less than the mode mask, as it is
1663 returned by the macro, it will definitely be less than the
1664 actual mode mask. */
1665 && ((GET_CODE (size) == CONST_INT
1666 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1667 <= (GET_MODE_MASK (mode) >> 1)))
1668 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1669 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1670 || (*pred) (x, BLKmode))
1671 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1672 || (*pred) (y, BLKmode))
1673 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1674 || (*pred) (opalign, VOIDmode)))
1676 rtx op2;
1677 rtx last = get_last_insn ();
1678 rtx pat;
1680 op2 = convert_to_mode (mode, size, 1);
1681 pred = insn_data[(int) code].operand[2].predicate;
1682 if (pred != 0 && ! (*pred) (op2, mode))
1683 op2 = copy_to_mode_reg (mode, op2);
1685 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1686 if (pat)
1688 emit_insn (pat);
1689 volatile_ok = 0;
1690 return 0;
1692 else
1693 delete_insns_since (last);
1697 volatile_ok = 0;
1699 /* X, Y, or SIZE may have been passed through protect_from_queue.
1701 It is unsafe to save the value generated by protect_from_queue
1702 and reuse it later. Consider what happens if emit_queue is
1703 called before the return value from protect_from_queue is used.
1705 Expansion of the CALL_EXPR below will call emit_queue before
1706 we are finished emitting RTL for argument setup. So if we are
1707 not careful we could get the wrong value for an argument.
1709 To avoid this problem we go ahead and emit code to copy X, Y &
1710 SIZE into new pseudos. We can then place those new pseudos
1711 into an RTL_EXPR and use them later, even after a call to
1712 emit_queue.
1714 Note this is not strictly needed for library calls since they
1715 do not call emit_queue before loading their arguments. However,
1716 we may need to have library calls call emit_queue in the future
1717 since failing to do so could cause problems for targets which
1718 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1719 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1720 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1722 #ifdef TARGET_MEM_FUNCTIONS
1723 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1724 #else
1725 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1726 TREE_UNSIGNED (integer_type_node));
1727 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1728 #endif
1730 #ifdef TARGET_MEM_FUNCTIONS
1731 /* It is incorrect to use the libcall calling conventions to call
1732 memcpy in this context.
1734 This could be a user call to memcpy and the user may wish to
1735 examine the return value from memcpy.
1737 For targets where libcalls and normal calls have different conventions
1738 for returning pointers, we could end up generating incorrect code.
1740 So instead of using a libcall sequence we build up a suitable
1741 CALL_EXPR and expand the call in the normal fashion. */
1742 if (fn == NULL_TREE)
1744 tree fntype;
1746 /* This was copied from except.c, I don't know if all this is
1747 necessary in this context or not. */
1748 fn = get_identifier ("memcpy");
1749 fntype = build_pointer_type (void_type_node);
1750 fntype = build_function_type (fntype, NULL_TREE);
1751 fn = build_decl (FUNCTION_DECL, fn, fntype);
1752 ggc_add_tree_root (&fn, 1);
1753 DECL_EXTERNAL (fn) = 1;
1754 TREE_PUBLIC (fn) = 1;
1755 DECL_ARTIFICIAL (fn) = 1;
1756 make_decl_rtl (fn, NULL_PTR, 1);
1757 assemble_external (fn);
1760 /* We need to make an argument list for the function call.
1762 memcpy has three arguments, the first two are void * addresses and
1763 the last is a size_t byte count for the copy. */
1764 arg_list
1765 = build_tree_list (NULL_TREE,
1766 make_tree (build_pointer_type (void_type_node), x));
1767 TREE_CHAIN (arg_list)
1768 = build_tree_list (NULL_TREE,
1769 make_tree (build_pointer_type (void_type_node), y));
1770 TREE_CHAIN (TREE_CHAIN (arg_list))
1771 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1772 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1774 /* Now we have to build up the CALL_EXPR itself. */
1775 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1776 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1777 call_expr, arg_list, NULL_TREE);
1778 TREE_SIDE_EFFECTS (call_expr) = 1;
1780 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1781 #else
1782 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1783 VOIDmode, 3, y, Pmode, x, Pmode,
1784 convert_to_mode (TYPE_MODE (integer_type_node), size,
1785 TREE_UNSIGNED (integer_type_node)),
1786 TYPE_MODE (integer_type_node));
1787 #endif
1790 return retval;
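/* Sketch of a typical call from expansion code, assuming X and Y are
   BLKmode MEMs for a 64-byte object known to be aligned to 64 bits;
   the returned rtx is the address memcpy returned, or 0 when no
   library call was emitted.  */
#if 0
  rtx retaddr = emit_block_move (x, y, GEN_INT (64), 64);
#endif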
1793 /* Copy all or part of a value X into registers starting at REGNO.
1794 The number of registers to be filled is NREGS. */
1796 void
1797 move_block_to_reg (regno, x, nregs, mode)
1798 int regno;
1799 rtx x;
1800 int nregs;
1801 enum machine_mode mode;
1803 int i;
1804 #ifdef HAVE_load_multiple
1805 rtx pat;
1806 rtx last;
1807 #endif
1809 if (nregs == 0)
1810 return;
1812 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1813 x = validize_mem (force_const_mem (mode, x));
1815 /* See if the machine can do this with a load multiple insn. */
1816 #ifdef HAVE_load_multiple
1817 if (HAVE_load_multiple)
1819 last = get_last_insn ();
1820 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1821 GEN_INT (nregs));
1822 if (pat)
1824 emit_insn (pat);
1825 return;
1827 else
1828 delete_insns_since (last);
1830 #endif
1832 for (i = 0; i < nregs; i++)
1833 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1834 operand_subword_force (x, i, mode));
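/* Sketch: on a 32-bit-word target, copy a DImode value X into the two
   consecutive word-sized hard registers starting at register 4 (the
   register number is illustrative only).  */
#if 0
  move_block_to_reg (4, x, 2, DImode);
#endif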
1837 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1838 The number of registers to be filled is NREGS. SIZE indicates the number
1839 of bytes in the object X. */
1841 void
1842 move_block_from_reg (regno, x, nregs, size)
1843 int regno;
1844 rtx x;
1845 int nregs;
1846 int size;
1848 int i;
1849 #ifdef HAVE_store_multiple
1850 rtx pat;
1851 rtx last;
1852 #endif
1853 enum machine_mode mode;
1855 /* If SIZE is that of a mode no bigger than a word, just use that
1856 mode's store operation. */
1857 if (size <= UNITS_PER_WORD
1858 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1860 emit_move_insn (change_address (x, mode, NULL),
1861 gen_rtx_REG (mode, regno));
1862 return;
1865 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1866 to the left before storing to memory. Note that the previous test
1867 doesn't handle all cases (e.g. SIZE == 3). */
1868 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1870 rtx tem = operand_subword (x, 0, 1, BLKmode);
1871 rtx shift;
1873 if (tem == 0)
1874 abort ();
1876 shift = expand_shift (LSHIFT_EXPR, word_mode,
1877 gen_rtx_REG (word_mode, regno),
1878 build_int_2 ((UNITS_PER_WORD - size)
1879 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1880 emit_move_insn (tem, shift);
1881 return;
1884 /* See if the machine can do this with a store multiple insn. */
1885 #ifdef HAVE_store_multiple
1886 if (HAVE_store_multiple)
1888 last = get_last_insn ();
1889 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1890 GEN_INT (nregs));
1891 if (pat)
1893 emit_insn (pat);
1894 return;
1896 else
1897 delete_insns_since (last);
1899 #endif
1901 for (i = 0; i < nregs; i++)
1903 rtx tem = operand_subword (x, i, 1, BLKmode);
1905 if (tem == 0)
1906 abort ();
1908 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1912 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1913 registers represented by a PARALLEL. SSIZE represents the total size of
1914 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1915 SRC in bits. */
1916 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1917 the balance will be in what would be the low-order memory addresses, i.e.
1918 left justified for big endian, right justified for little endian. This
1919 happens to be true for the targets currently using this support. If this
1920 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1921 would be needed. */
1923 void
1924 emit_group_load (dst, orig_src, ssize, align)
1925 rtx dst, orig_src;
1926 unsigned int align;
1927 int ssize;
1929 rtx *tmps, src;
1930 int start, i;
1932 if (GET_CODE (dst) != PARALLEL)
1933 abort ();
1935 /* Check for a NULL entry, used to indicate that the parameter goes
1936 both on the stack and in registers. */
1937 if (XEXP (XVECEXP (dst, 0, 0), 0))
1938 start = 0;
1939 else
1940 start = 1;
1942 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1944 /* If we won't be loading directly from memory, protect the real source
1945 from strange tricks we might play. */
1946 src = orig_src;
1947 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
1949 if (GET_MODE (src) == VOIDmode)
1950 src = gen_reg_rtx (GET_MODE (dst));
1951 else
1952 src = gen_reg_rtx (GET_MODE (orig_src));
1953 emit_move_insn (src, orig_src);
1956 /* Process the pieces. */
1957 for (i = start; i < XVECLEN (dst, 0); i++)
1959 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1960 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1961 unsigned int bytelen = GET_MODE_SIZE (mode);
1962 int shift = 0;
1964 /* Handle trailing fragments that run over the size of the struct. */
1965 if (ssize >= 0 && bytepos + bytelen > ssize)
1967 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1968 bytelen = ssize - bytepos;
1969 if (bytelen <= 0)
1970 abort ();
1973 /* Optimize the access just a bit. */
1974 if (GET_CODE (src) == MEM
1975 && align >= GET_MODE_ALIGNMENT (mode)
1976 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1977 && bytelen == GET_MODE_SIZE (mode))
1979 tmps[i] = gen_reg_rtx (mode);
1980 emit_move_insn (tmps[i],
1981 change_address (src, mode,
1982 plus_constant (XEXP (src, 0),
1983 bytepos)));
1985 else if (GET_CODE (src) == CONCAT)
1987 if (bytepos == 0
1988 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1989 tmps[i] = XEXP (src, 0);
1990 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1991 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1992 tmps[i] = XEXP (src, 1);
1993 else
1994 abort ();
1996 else if ((CONSTANT_P (src)
1997 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
1998 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1999 tmps[i] = src;
2000 else
2001 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2002 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2003 mode, mode, align, ssize);
2005 if (BYTES_BIG_ENDIAN && shift)
2006 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2007 tmps[i], 0, OPTAB_WIDEN);
2010 emit_queue ();
2012 /* Copy the extracted pieces into the proper (probable) hard regs. */
2013 for (i = start; i < XVECLEN (dst, 0); i++)
2014 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
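/* Illustrative sketch, not part of GCC: the trailing-fragment shift
   computed above, checked with concrete numbers.  Assumes 8-bit units;
   all names are local to the example.  */
#include <assert.h>

static int
example_trailing_shift (int ssize, int bytepos, int bytelen)
{
  /* Bits of the register piece that hold no data because the block
     ends before the piece does; same formula as emit_group_load.  */
  return (bytelen - (ssize - bytepos)) * 8;
}

static void
example_trailing_shift_check (void)
{
  /* A 10-byte block whose last piece is a 4-byte register starting at
     byte 8: only 2 bytes are real, so 16 bits of the register must be
     shifted into place.  */
  assert (example_trailing_shift (10, 8, 4) == 16);
}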
2017 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2018 registers represented by a PARALLEL. SSIZE represents the total size of
2019 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2021 void
2022 emit_group_store (orig_dst, src, ssize, align)
2023 rtx orig_dst, src;
2024 int ssize;
2025 unsigned int align;
2027 rtx *tmps, dst;
2028 int start, i;
2030 if (GET_CODE (src) != PARALLEL)
2031 abort ();
2033 /* Check for a NULL entry, used to indicate that the parameter goes
2034 both on the stack and in registers. */
2035 if (XEXP (XVECEXP (src, 0, 0), 0))
2036 start = 0;
2037 else
2038 start = 1;
2040 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2042 /* Copy the (probable) hard regs into pseudos. */
2043 for (i = start; i < XVECLEN (src, 0); i++)
2045 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2046 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2047 emit_move_insn (tmps[i], reg);
2049 emit_queue ();
2051 /* If we won't be storing directly into memory, protect the real destination
2052 from strange tricks we might play. */
2053 dst = orig_dst;
2054 if (GET_CODE (dst) == PARALLEL)
2056 rtx temp;
2058 /* We can get a PARALLEL dst if there is a conditional expression in
2059 a return statement. In that case, the dst and src are the same,
2060 so no action is necessary. */
2061 if (rtx_equal_p (dst, src))
2062 return;
2064 /* It is unclear if we can ever reach here, but we may as well handle
2065 it. Allocate a temporary, and split this into a store/load to/from
2066 the temporary. */
2068 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2069 emit_group_store (temp, src, ssize, align);
2070 emit_group_load (dst, temp, ssize, align);
2071 return;
2073 else if (GET_CODE (dst) != MEM)
2075 dst = gen_reg_rtx (GET_MODE (orig_dst));
2076 /* Make life a bit easier for combine. */
2077 emit_move_insn (dst, const0_rtx);
2080 /* Process the pieces. */
2081 for (i = start; i < XVECLEN (src, 0); i++)
2083 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2084 enum machine_mode mode = GET_MODE (tmps[i]);
2085 unsigned int bytelen = GET_MODE_SIZE (mode);
2087 /* Handle trailing fragments that run over the size of the struct. */
2088 if (ssize >= 0 && bytepos + bytelen > ssize)
2090 if (BYTES_BIG_ENDIAN)
2092 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2093 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2094 tmps[i], 0, OPTAB_WIDEN);
2096 bytelen = ssize - bytepos;
2099 /* Optimize the access just a bit. */
2100 if (GET_CODE (dst) == MEM
2101 && align >= GET_MODE_ALIGNMENT (mode)
2102 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2103 && bytelen == GET_MODE_SIZE (mode))
2104 emit_move_insn (change_address (dst, mode,
2105 plus_constant (XEXP (dst, 0),
2106 bytepos)),
2107 tmps[i]);
2108 else
2109 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2110 mode, tmps[i], align, ssize);
2113 emit_queue ();
2115 /* Copy from the pseudo into the (probable) hard reg. */
2116 if (GET_CODE (dst) == REG)
2117 emit_move_insn (orig_dst, dst);
2120 /* Generate code to copy a BLKmode object of TYPE out of a
2121 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2122 is null, a stack temporary is created. TGTBLK is returned.
2124 The primary purpose of this routine is to handle functions
2125 that return BLKmode structures in registers. Some machines
2126 (the PA for example) want to return all small structures
2127 in registers regardless of the structure's alignment. */
2129 rtx
2130 copy_blkmode_from_reg (tgtblk, srcreg, type)
2131 rtx tgtblk;
2132 rtx srcreg;
2133 tree type;
2135 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2136 rtx src = NULL, dst = NULL;
2137 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2138 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2140 if (tgtblk == 0)
2142 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2143 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2144 preserve_temp_slots (tgtblk);
2147 /* This code assumes srcreg is at least a full word. If it isn't,
2148 copy it into a new pseudo which is a full word. */
2149 if (GET_MODE (srcreg) != BLKmode
2150 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2151 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2153 /* Structures whose size is not a multiple of a word are aligned
2154 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2155 machine, this means we must skip the empty high order bytes when
2156 calculating the bit offset. */
2157 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2158 big_endian_correction
2159 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2161 /* Copy the structure BITSIZE bits at a time.
2163 We could probably emit more efficient code for machines which do not use
2164 strict alignment, but it doesn't seem worth the effort at the current
2165 time. */
2166 for (bitpos = 0, xbitpos = big_endian_correction;
2167 bitpos < bytes * BITS_PER_UNIT;
2168 bitpos += bitsize, xbitpos += bitsize)
2170 /* We need a new source operand each time xbitpos is on a
2171 word boundary and when xbitpos == big_endian_correction
2172 (the first time through). */
2173 if (xbitpos % BITS_PER_WORD == 0
2174 || xbitpos == big_endian_correction)
2175 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2177 /* We need a new destination operand each time bitpos is on
2178 a word boundary. */
2179 if (bitpos % BITS_PER_WORD == 0)
2180 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2182 /* Use xbitpos for the source extraction (right justified) and
2183 bitpos for the destination store (left justified). */
2184 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2185 extract_bit_field (src, bitsize,
2186 xbitpos % BITS_PER_WORD, 1,
2187 NULL_RTX, word_mode, word_mode,
2188 bitsize, BITS_PER_WORD),
2189 bitsize, BITS_PER_WORD);
2192 return tgtblk;
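/* Illustrative sketch, not part of GCC: the big-endian correction
   computed above, with concrete numbers.  Assumes a 32-bit word and
   8-bit units; the name is local to the example.  */
static unsigned int
example_big_endian_correction (unsigned int bytes)
{
  const unsigned int bits_per_word = 32, units_per_word = 4;

  if (bytes % units_per_word == 0)
    return 0;
  /* Empty high-order bits to skip in the last word of the source.  */
  return bits_per_word - (bytes % units_per_word) * 8;
}

/* For a 6-byte structure, the second word carries only 2 meaningful
   bytes, so extraction starts 16 bits in:
   example_big_endian_correction (6) == 16.  */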
2195 /* Add a USE expression for REG to the (possibly empty) list pointed
2196 to by CALL_FUSAGE. REG must denote a hard register. */
2198 void
2199 use_reg (call_fusage, reg)
2200 rtx *call_fusage, reg;
2202 if (GET_CODE (reg) != REG
2203 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2204 abort ();
2206 *call_fusage
2207 = gen_rtx_EXPR_LIST (VOIDmode,
2208 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2211 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2212 starting at REGNO. All of these registers must be hard registers. */
2214 void
2215 use_regs (call_fusage, regno, nregs)
2216 rtx *call_fusage;
2217 int regno;
2218 int nregs;
2220 int i;
2222 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2223 abort ();
2225 for (i = 0; i < nregs; i++)
2226 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2229 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2230 PARALLEL REGS. This is for calls that pass values in multiple
2231 non-contiguous locations. The Irix 6 ABI has examples of this. */
2233 void
2234 use_group_regs (call_fusage, regs)
2235 rtx *call_fusage;
2236 rtx regs;
2238 int i;
2240 for (i = 0; i < XVECLEN (regs, 0); i++)
2242 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2244 /* A NULL entry means the parameter goes both on the stack and in
2245 registers. This can also be a MEM for targets that pass values
2246 partially on the stack and partially in registers. */
2247 if (reg != 0 && GET_CODE (reg) == REG)
2248 use_reg (call_fusage, reg);
2252 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2253 rtx with BLKmode). The caller must pass TO through protect_from_queue
2254 before calling. ALIGN is maximum alignment we can assume. */
2256 static void
2257 clear_by_pieces (to, len, align)
2258 rtx to;
2259 unsigned HOST_WIDE_INT len;
2260 unsigned int align;
2262 struct clear_by_pieces data;
2263 rtx to_addr = XEXP (to, 0);
2264 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2265 enum machine_mode mode = VOIDmode, tmode;
2266 enum insn_code icode;
2268 data.offset = 0;
2269 data.to_addr = to_addr;
2270 data.to = to;
2271 data.autinc_to
2272 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2273 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2275 data.explicit_inc_to = 0;
2276 data.reverse
2277 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2278 if (data.reverse)
2279 data.offset = len;
2280 data.len = len;
2282 /* If copying requires more than two move insns,
2283 copy addresses to registers (to make displacements shorter)
2284 and use post-increment if available. */
2285 if (!data.autinc_to
2286 && move_by_pieces_ninsns (len, align) > 2)
2288 /* Determine the main mode we'll be using. */
2289 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2290 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2291 if (GET_MODE_SIZE (tmode) < max_size)
2292 mode = tmode;
2294 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2296 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2297 data.autinc_to = 1;
2298 data.explicit_inc_to = -1;
2301 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
2302 && ! data.autinc_to)
2304 data.to_addr = copy_addr_to_reg (to_addr);
2305 data.autinc_to = 1;
2306 data.explicit_inc_to = 1;
2309 if ( !data.autinc_to && CONSTANT_P (to_addr))
2310 data.to_addr = copy_addr_to_reg (to_addr);
2313 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2314 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2315 align = MOVE_MAX * BITS_PER_UNIT;
2317 /* First move what we can in the largest integer mode, then go to
2318 successively smaller modes. */
2320 while (max_size > 1)
2322 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2323 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2324 if (GET_MODE_SIZE (tmode) < max_size)
2325 mode = tmode;
2327 if (mode == VOIDmode)
2328 break;
2330 icode = mov_optab->handlers[(int) mode].insn_code;
2331 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2332 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2334 max_size = GET_MODE_SIZE (mode);
2337 /* The code above should have handled everything. */
2338 if (data.len != 0)
2339 abort ();
2342 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2343 with move instructions for mode MODE. GENFUN is the gen_... function
2344 to make a move insn for that mode. DATA has all the other info. */
2346 static void
2347 clear_by_pieces_1 (genfun, mode, data)
2348 rtx (*genfun) PARAMS ((rtx, ...));
2349 enum machine_mode mode;
2350 struct clear_by_pieces *data;
2352 unsigned int size = GET_MODE_SIZE (mode);
2353 rtx to1;
2355 while (data->len >= size)
2357 if (data->reverse)
2358 data->offset -= size;
2360 if (data->autinc_to)
2362 to1 = gen_rtx_MEM (mode, data->to_addr);
2363 MEM_COPY_ATTRIBUTES (to1, data->to);
2365 else
2366 to1 = change_address (data->to, mode,
2367 plus_constant (data->to_addr, data->offset));
2369 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2370 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2372 emit_insn ((*genfun) (to1, const0_rtx));
2374 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2375 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2377 if (! data->reverse)
2378 data->offset += size;
2380 data->len -= size;
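/* Illustrative sketch, not part of GCC: the strategy clear_by_pieces
   implements, written as ordinary C.  The widest store that still fits
   is used first, then successively narrower ones, so an 11-byte block
   is cleared as 8 + 2 + 1 bytes.  Unaligned stores are assumed to be
   acceptable; memset here stands in for a single store insn, and all
   names are local to the example.  */
#include <stddef.h>
#include <string.h>

static void
example_clear_by_pieces (unsigned char *to, size_t len)
{
  static const size_t piece_sizes[] = { 8, 4, 2, 1 };
  size_t i;

  for (i = 0; i < sizeof piece_sizes / sizeof piece_sizes[0]; i++)
    while (len >= piece_sizes[i])
      {
	memset (to, 0, piece_sizes[i]);
	to += piece_sizes[i];
	len -= piece_sizes[i];
      }
}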
2384 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2385 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2387 If we call a function that returns the length of the block, return it. */
2389 rtx
2390 clear_storage (object, size, align)
2391 rtx object;
2392 rtx size;
2393 unsigned int align;
2395 #ifdef TARGET_MEM_FUNCTIONS
2396 static tree fn;
2397 tree call_expr, arg_list;
2398 #endif
2399 rtx retval = 0;
2401 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2402 just move a zero. Otherwise, do this a piece at a time. */
2403 if (GET_MODE (object) != BLKmode
2404 && GET_CODE (size) == CONST_INT
2405 && GET_MODE_SIZE (GET_MODE (object)) == INTVAL (size))
2406 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2407 else
2409 object = protect_from_queue (object, 1);
2410 size = protect_from_queue (size, 0);
2412 if (GET_CODE (size) == CONST_INT
2413 && MOVE_BY_PIECES_P (INTVAL (size), align))
2414 clear_by_pieces (object, INTVAL (size), align);
2415 else
2417 /* Try the most limited insn first, because there's no point
2418 including more than one in the machine description unless
2419 the more limited one has some advantage. */
2421 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2422 enum machine_mode mode;
2424 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2425 mode = GET_MODE_WIDER_MODE (mode))
2427 enum insn_code code = clrstr_optab[(int) mode];
2428 insn_operand_predicate_fn pred;
2430 if (code != CODE_FOR_nothing
2431 /* We don't need MODE to be narrower than
2432 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2433 the mode mask, as it is returned by the macro, it will
2434 definitely be less than the actual mode mask. */
2435 && ((GET_CODE (size) == CONST_INT
2436 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2437 <= (GET_MODE_MASK (mode) >> 1)))
2438 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2439 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2440 || (*pred) (object, BLKmode))
2441 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2442 || (*pred) (opalign, VOIDmode)))
2444 rtx op1;
2445 rtx last = get_last_insn ();
2446 rtx pat;
2448 op1 = convert_to_mode (mode, size, 1);
2449 pred = insn_data[(int) code].operand[1].predicate;
2450 if (pred != 0 && ! (*pred) (op1, mode))
2451 op1 = copy_to_mode_reg (mode, op1);
2453 pat = GEN_FCN ((int) code) (object, op1, opalign);
2454 if (pat)
2456 emit_insn (pat);
2457 return 0;
2459 else
2460 delete_insns_since (last);
2464 /* OBJECT or SIZE may have been passed through protect_from_queue.
2466 It is unsafe to save the value generated by protect_from_queue
2467 and reuse it later. Consider what happens if emit_queue is
2468 called before the return value from protect_from_queue is used.
2470 Expansion of the CALL_EXPR below will call emit_queue before
2471 we are finished emitting RTL for argument setup. So if we are
2472 not careful we could get the wrong value for an argument.
2474 To avoid this problem we go ahead and emit code to copy OBJECT
2475 and SIZE into new pseudos. We can then place those new pseudos
2476 into an RTL_EXPR and use them later, even after a call to
2477 emit_queue.
2479 Note this is not strictly needed for library calls since they
2480 do not call emit_queue before loading their arguments. However,
2481 we may need to have library calls call emit_queue in the future
2482 since failing to do so could cause problems for targets which
2483 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2484 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2486 #ifdef TARGET_MEM_FUNCTIONS
2487 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2488 #else
2489 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2490 TREE_UNSIGNED (integer_type_node));
2491 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2492 #endif
2494 #ifdef TARGET_MEM_FUNCTIONS
2495 /* It is incorrect to use the libcall calling conventions to call
2496 memset in this context.
2498 This could be a user call to memset and the user may wish to
2499 examine the return value from memset.
2501 For targets where libcalls and normal calls have different
2502 conventions for returning pointers, we could end up generating
2503 incorrect code.
2505 So instead of using a libcall sequence we build up a suitable
2506 CALL_EXPR and expand the call in the normal fashion. */
2507 if (fn == NULL_TREE)
2509 tree fntype;
2511 /* This was copied from except.c; I don't know if all this is
2512 necessary in this context or not. */
2513 fn = get_identifier ("memset");
2514 fntype = build_pointer_type (void_type_node);
2515 fntype = build_function_type (fntype, NULL_TREE);
2516 fn = build_decl (FUNCTION_DECL, fn, fntype);
2517 ggc_add_tree_root (&fn, 1);
2518 DECL_EXTERNAL (fn) = 1;
2519 TREE_PUBLIC (fn) = 1;
2520 DECL_ARTIFICIAL (fn) = 1;
2521 make_decl_rtl (fn, NULL_PTR, 1);
2522 assemble_external (fn);
2525 /* We need to make an argument list for the function call.
2527 memset has three arguments, the first is a void * address, the
2528 second an integer with the initialization value, and the last is a
2529 size_t byte count for the copy. */
2530 arg_list
2531 = build_tree_list (NULL_TREE,
2532 make_tree (build_pointer_type (void_type_node),
2533 object));
2534 TREE_CHAIN (arg_list)
2535 = build_tree_list (NULL_TREE,
2536 make_tree (integer_type_node, const0_rtx));
2537 TREE_CHAIN (TREE_CHAIN (arg_list))
2538 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2539 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2541 /* Now we have to build up the CALL_EXPR itself. */
2542 call_expr = build1 (ADDR_EXPR,
2543 build_pointer_type (TREE_TYPE (fn)), fn);
2544 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2545 call_expr, arg_list, NULL_TREE);
2546 TREE_SIDE_EFFECTS (call_expr) = 1;
2548 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2549 #else
2550 emit_library_call (bzero_libfunc, LCT_NORMAL,
2551 VOIDmode, 2, object, Pmode, size,
2552 TYPE_MODE (integer_type_node));
2553 #endif
2557 return retval;
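/* Illustrative sketch, not part of GCC: the CALL_EXPR built above
   corresponds to the ordinary call below.  Expanding it as a normal
   call rather than a libcall keeps the usual calling conventions and
   leaves the return value usable, which matters when the call came
   from user code.  */
#include <string.h>

static void *
example_clear_storage (void *object, size_t size)
{
  /* memset returns its first argument, and user code may rely on it.  */
  return memset (object, 0, size);
}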
2560 /* Generate code to copy Y into X.
2561 Both Y and X must have the same mode, except that
2562 Y can be a constant with VOIDmode.
2563 This mode cannot be BLKmode; use emit_block_move for that.
2565 Return the last instruction emitted. */
2567 rtx
2568 emit_move_insn (x, y)
2569 rtx x, y;
2571 enum machine_mode mode = GET_MODE (x);
2573 x = protect_from_queue (x, 1);
2574 y = protect_from_queue (y, 0);
2576 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2577 abort ();
2579 /* Never force constant_p_rtx to memory. */
2580 if (GET_CODE (y) == CONSTANT_P_RTX)
2582 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2583 y = force_const_mem (mode, y);
2585 /* If X or Y are memory references, verify that their addresses are valid
2586 for the machine. */
2587 if (GET_CODE (x) == MEM
2588 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2589 && ! push_operand (x, GET_MODE (x)))
2590 || (flag_force_addr
2591 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2592 x = change_address (x, VOIDmode, XEXP (x, 0));
2594 if (GET_CODE (y) == MEM
2595 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2596 || (flag_force_addr
2597 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2598 y = change_address (y, VOIDmode, XEXP (y, 0));
2600 if (mode == BLKmode)
2601 abort ();
2603 return emit_move_insn_1 (x, y);
2606 /* Low level part of emit_move_insn.
2607 Called just like emit_move_insn, but assumes X and Y
2608 are basically valid. */
2610 rtx
2611 emit_move_insn_1 (x, y)
2612 rtx x, y;
2614 enum machine_mode mode = GET_MODE (x);
2615 enum machine_mode submode;
2616 enum mode_class class = GET_MODE_CLASS (mode);
2617 unsigned int i;
2619 if (mode >= MAX_MACHINE_MODE)
2620 abort ();
2622 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2623 return
2624 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2626 /* Expand complex moves by moving real part and imag part, if possible. */
2627 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2628 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2629 * BITS_PER_UNIT),
2630 (class == MODE_COMPLEX_INT
2631 ? MODE_INT : MODE_FLOAT),
2632 0))
2633 && (mov_optab->handlers[(int) submode].insn_code
2634 != CODE_FOR_nothing))
2636 /* Don't split destination if it is a stack push. */
2637 int stack = push_operand (x, GET_MODE (x));
2639 /* If this is a stack, push the highpart first, so it
2640 will be in the argument order.
2642 In that case, change_address is used only to convert
2643 the mode, not to change the address. */
2644 if (stack)
2646 /* Note that the real part always precedes the imag part in memory
2647 regardless of machine's endianness. */
2648 #ifdef STACK_GROWS_DOWNWARD
2649 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2650 (gen_rtx_MEM (submode, XEXP (x, 0)),
2651 gen_imagpart (submode, y)));
2652 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2653 (gen_rtx_MEM (submode, XEXP (x, 0)),
2654 gen_realpart (submode, y)));
2655 #else
2656 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2657 (gen_rtx_MEM (submode, XEXP (x, 0)),
2658 gen_realpart (submode, y)));
2659 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2660 (gen_rtx_MEM (submode, XEXP (x, 0)),
2661 gen_imagpart (submode, y)));
2662 #endif
2664 else
2666 rtx realpart_x, realpart_y;
2667 rtx imagpart_x, imagpart_y;
2669 /* If this is a complex value with each part being smaller than a
2670 word, the usual calling sequence will likely pack the pieces into
2671 a single register. Unfortunately, SUBREG of hard registers only
2672 deals in terms of words, so we have a problem converting input
2673 arguments to the CONCAT of two registers that is used elsewhere
2674 for complex values. If this is before reload, we can copy it into
2675 memory and reload. FIXME, we should see about using extract and
2676 insert on integer registers, but complex short and complex char
2677 variables should be rarely used. */
2678 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2679 && (reload_in_progress | reload_completed) == 0)
2681 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2682 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2684 if (packed_dest_p || packed_src_p)
2686 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2687 ? MODE_FLOAT : MODE_INT);
2689 enum machine_mode reg_mode =
2690 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2692 if (reg_mode != BLKmode)
2694 rtx mem = assign_stack_temp (reg_mode,
2695 GET_MODE_SIZE (mode), 0);
2697 rtx cmem = change_address (mem, mode, NULL_RTX);
2699 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2701 if (packed_dest_p)
2703 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2704 emit_move_insn_1 (cmem, y);
2705 return emit_move_insn_1 (sreg, mem);
2707 else
2709 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2710 emit_move_insn_1 (mem, sreg);
2711 return emit_move_insn_1 (x, cmem);
2717 realpart_x = gen_realpart (submode, x);
2718 realpart_y = gen_realpart (submode, y);
2719 imagpart_x = gen_imagpart (submode, x);
2720 imagpart_y = gen_imagpart (submode, y);
2722 /* Show the output dies here. This is necessary for SUBREGs
2723 of pseudos since we cannot track their lifetimes correctly;
2724 hard regs shouldn't appear here except as return values.
2725 We never want to emit such a clobber after reload. */
2726 if (x != y
2727 && ! (reload_in_progress || reload_completed)
2728 && (GET_CODE (realpart_x) == SUBREG
2729 || GET_CODE (imagpart_x) == SUBREG))
2731 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2734 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2735 (realpart_x, realpart_y));
2736 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2737 (imagpart_x, imagpart_y));
2740 return get_last_insn ();
2743 /* This will handle any multi-word mode that lacks a move_insn pattern.
2744 However, you will get better code if you define such patterns,
2745 even if they must turn into multiple assembler instructions. */
2746 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2748 rtx last_insn = 0;
2749 rtx seq, inner;
2750 int need_clobber;
2752 #ifdef PUSH_ROUNDING
2754 /* If X is a push on the stack, do the push now and replace
2755 X with a reference to the stack pointer. */
2756 if (push_operand (x, GET_MODE (x)))
2758 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2759 x = change_address (x, VOIDmode, stack_pointer_rtx);
2761 #endif
2763 /* If we are in reload, see if either operand is a MEM whose address
2764 is scheduled for replacement. */
2765 if (reload_in_progress && GET_CODE (x) == MEM
2766 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2768 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2770 MEM_COPY_ATTRIBUTES (new, x);
2771 x = new;
2773 if (reload_in_progress && GET_CODE (y) == MEM
2774 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2776 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2778 MEM_COPY_ATTRIBUTES (new, y);
2779 y = new;
2782 start_sequence ();
2784 need_clobber = 0;
2785 for (i = 0;
2786 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2787 i++)
2789 rtx xpart = operand_subword (x, i, 1, mode);
2790 rtx ypart = operand_subword (y, i, 1, mode);
2792 /* If we can't get a part of Y, put Y into memory if it is a
2793 constant. Otherwise, force it into a register. If we still
2794 can't get a part of Y, abort. */
2795 if (ypart == 0 && CONSTANT_P (y))
2797 y = force_const_mem (mode, y);
2798 ypart = operand_subword (y, i, 1, mode);
2800 else if (ypart == 0)
2801 ypart = operand_subword_force (y, i, mode);
2803 if (xpart == 0 || ypart == 0)
2804 abort ();
2806 need_clobber |= (GET_CODE (xpart) == SUBREG);
2808 last_insn = emit_move_insn (xpart, ypart);
2811 seq = gen_sequence ();
2812 end_sequence ();
2814 /* Show the output dies here. This is necessary for SUBREGs
2815 of pseudos since we cannot track their lifetimes correctly;
2816 hard regs shouldn't appear here except as return values.
2817 We never want to emit such a clobber after reload. */
2818 if (x != y
2819 && ! (reload_in_progress || reload_completed)
2820 && need_clobber != 0)
2822 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2825 emit_insn (seq);
2827 return last_insn;
2829 else
2830 abort ();
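/* Illustrative sketch, not part of GCC: the word-by-word fallback
   above, written as ordinary C.  Assumes a 4-byte word and buffers
   padded out to a whole number of words; names are local to the
   example.  */
#include <string.h>

static void
example_multiword_move (unsigned char *x, const unsigned char *y,
			unsigned int mode_size)
{
  const unsigned int units_per_word = 4;
  unsigned int i;
  unsigned int nwords = (mode_size + units_per_word - 1) / units_per_word;

  for (i = 0; i < nwords; i++)
    /* One word-sized move per iteration, like the emit_move_insn
       calls in the loop above.  */
    memcpy (x + i * units_per_word, y + i * units_per_word,
	    units_per_word);
}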
2833 /* Pushing data onto the stack. */
2835 /* Push a block of length SIZE (perhaps variable)
2836 and return an rtx to address the beginning of the block.
2837 Note that it is not possible for the value returned to be a QUEUED.
2838 The value may be virtual_outgoing_args_rtx.
2840 EXTRA is the number of bytes of padding to push in addition to SIZE.
2841 BELOW nonzero means this padding comes at low addresses;
2842 otherwise, the padding comes at high addresses. */
2844 rtx
2845 push_block (size, extra, below)
2846 rtx size;
2847 int extra, below;
2849 register rtx temp;
2851 size = convert_modes (Pmode, ptr_mode, size, 1);
2852 if (CONSTANT_P (size))
2853 anti_adjust_stack (plus_constant (size, extra));
2854 else if (GET_CODE (size) == REG && extra == 0)
2855 anti_adjust_stack (size);
2856 else
2858 temp = copy_to_mode_reg (Pmode, size);
2859 if (extra != 0)
2860 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2861 temp, 0, OPTAB_LIB_WIDEN);
2862 anti_adjust_stack (temp);
2865 #ifndef STACK_GROWS_DOWNWARD
2866 #ifdef ARGS_GROW_DOWNWARD
2867 if (!ACCUMULATE_OUTGOING_ARGS)
2868 #else
2869 if (0)
2870 #endif
2871 #else
2872 if (1)
2873 #endif
2875 /* Return the lowest stack address when STACK or ARGS grow downward and
2876 we are not accumulating outgoing arguments (the c4x port uses such
2877 conventions). */
2878 temp = virtual_outgoing_args_rtx;
2879 if (extra != 0 && below)
2880 temp = plus_constant (temp, extra);
2882 else
2884 if (GET_CODE (size) == CONST_INT)
2885 temp = plus_constant (virtual_outgoing_args_rtx,
2886 -INTVAL (size) - (below ? 0 : extra));
2887 else if (extra != 0 && !below)
2888 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2889 negate_rtx (Pmode, plus_constant (size, extra)));
2890 else
2891 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2892 negate_rtx (Pmode, size));
2895 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
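/* Illustrative sketch, not part of GCC: the downward-growing case of
   the address arithmetic above, with the stack adjust made explicit.
   Growing the stack by SIZE + EXTRA moves the outgoing-args pointer
   down; the block then begins at that new pointer, past any padding
   placed below it.  All names are local to the example.  */
static unsigned long
example_push_block_addr (unsigned long args_ptr, unsigned long size,
			 unsigned long extra, int pad_below)
{
  unsigned long args_ptr_after = args_ptr - (size + extra);

  return args_ptr_after + (pad_below ? extra : 0);
}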
2898 rtx
2899 gen_push_operand ()
2901 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2904 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2905 block of SIZE bytes. */
2907 static rtx
2908 get_push_address (size)
2909 int size;
2911 register rtx temp;
2913 if (STACK_PUSH_CODE == POST_DEC)
2914 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2915 else if (STACK_PUSH_CODE == POST_INC)
2916 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2917 else
2918 temp = stack_pointer_rtx;
2920 return copy_to_reg (temp);
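/* Illustrative sketch, not part of GCC: where a block of SIZE
   just-pushed bytes begins relative to the current stack pointer in
   the three cases above.  With a post-modify push the pointer has
   already moved past the data; with a pre-modify push it addresses
   the data directly.  Names are local to the example.  */
static unsigned long
example_push_address (unsigned long sp, unsigned long size,
		      int post_dec, int post_inc)
{
  if (post_dec)
    return sp + size;	/* stack grew downward past the block */
  else if (post_inc)
    return sp - size;	/* stack grew upward past the block */
  else
    return sp;		/* pre-modify push: sp addresses the block */
}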
2923 /* Generate code to push X onto the stack, assuming it has mode MODE and
2924 type TYPE.
2925 MODE is redundant except when X is a CONST_INT (since they don't
2926 carry mode info).
2927 SIZE is an rtx for the size of data to be copied (in bytes),
2928 needed only if X is BLKmode.
2930 ALIGN is maximum alignment we can assume.
2932 If PARTIAL and REG are both nonzero, then copy that many of the first
2933 words of X into registers starting with REG, and push the rest of X.
2934 The amount of space pushed is decreased by PARTIAL words,
2935 rounded *down* to a multiple of PARM_BOUNDARY.
2936 REG must be a hard register in this case.
2937 If REG is zero but PARTIAL is not, take all other actions for an
2938 argument partially in registers, but do not actually load any
2939 registers.
2941 EXTRA is the amount in bytes of extra space to leave next to this arg.
2942 This is ignored if an argument block has already been allocated.
2944 On a machine that lacks real push insns, ARGS_ADDR is the address of
2945 the bottom of the argument block for this call. We use indexing off there
2946 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2947 argument block has not been preallocated.
2949 ARGS_SO_FAR is the size of args previously pushed for this call.
2951 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2952 for arguments passed in registers. If nonzero, it will be the number
2953 of bytes required. */
2955 void
2956 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2957 args_addr, args_so_far, reg_parm_stack_space,
2958 alignment_pad)
2959 register rtx x;
2960 enum machine_mode mode;
2961 tree type;
2962 rtx size;
2963 unsigned int align;
2964 int partial;
2965 rtx reg;
2966 int extra;
2967 rtx args_addr;
2968 rtx args_so_far;
2969 int reg_parm_stack_space;
2970 rtx alignment_pad;
2972 rtx xinner;
2973 enum direction stack_direction
2974 #ifdef STACK_GROWS_DOWNWARD
2975 = downward;
2976 #else
2977 = upward;
2978 #endif
2980 /* Decide where to pad the argument: `downward' for below,
2981 `upward' for above, or `none' for don't pad it.
2982 Default is below for small data on big-endian machines; else above. */
2983 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2985 /* Invert direction if stack is post-update. */
2986 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2987 if (where_pad != none)
2988 where_pad = (where_pad == downward ? upward : downward);
2990 xinner = x = protect_from_queue (x, 0);
2992 if (mode == BLKmode)
2994 /* Copy a block into the stack, entirely or partially. */
2996 register rtx temp;
2997 int used = partial * UNITS_PER_WORD;
2998 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2999 int skip;
3001 if (size == 0)
3002 abort ();
3004 used -= offset;
3006 /* USED is now the # of bytes we need not copy to the stack
3007 because registers will take care of them. */
3009 if (partial != 0)
3010 xinner = change_address (xinner, BLKmode,
3011 plus_constant (XEXP (xinner, 0), used));
3013 /* If the partial register-part of the arg counts in its stack size,
3014 skip the part of stack space corresponding to the registers.
3015 Otherwise, start copying to the beginning of the stack space,
3016 by setting SKIP to 0. */
3017 skip = (reg_parm_stack_space == 0) ? 0 : used;
3019 #ifdef PUSH_ROUNDING
3020 /* Do it with several push insns if that doesn't take lots of insns
3021 and if there is no difficulty with push insns that skip bytes
3022 on the stack for alignment purposes. */
3023 if (args_addr == 0
3024 && PUSH_ARGS
3025 && GET_CODE (size) == CONST_INT
3026 && skip == 0
3027 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3028 /* Here we avoid the case of a structure whose weak alignment
3029 forces many pushes of a small amount of data,
3030 and such small pushes do rounding that causes trouble. */
3031 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3032 || align >= BIGGEST_ALIGNMENT
3033 || PUSH_ROUNDING (align) == align)
3034 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3036 /* Push padding now if padding above and stack grows down,
3037 or if padding below and stack grows up.
3038 But if space already allocated, this has already been done. */
3039 if (extra && args_addr == 0
3040 && where_pad != none && where_pad != stack_direction)
3041 anti_adjust_stack (GEN_INT (extra));
3043 stack_pointer_delta += INTVAL (size) - used;
3044 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3045 INTVAL (size) - used, align);
3047 if (current_function_check_memory_usage && ! in_check_memory_usage)
3049 rtx temp;
3051 in_check_memory_usage = 1;
3052 temp = get_push_address (INTVAL (size) - used);
3053 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3054 emit_library_call (chkr_copy_bitmap_libfunc,
3055 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3056 Pmode, XEXP (xinner, 0), Pmode,
3057 GEN_INT (INTVAL (size) - used),
3058 TYPE_MODE (sizetype));
3059 else
3060 emit_library_call (chkr_set_right_libfunc,
3061 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3062 Pmode, GEN_INT (INTVAL (size) - used),
3063 TYPE_MODE (sizetype),
3064 GEN_INT (MEMORY_USE_RW),
3065 TYPE_MODE (integer_type_node));
3066 in_check_memory_usage = 0;
3069 else
3070 #endif /* PUSH_ROUNDING */
3072 rtx target;
3074 /* Otherwise make space on the stack and copy the data
3075 to the address of that space. */
3077 /* Deduct words put into registers from the size we must copy. */
3078 if (partial != 0)
3080 if (GET_CODE (size) == CONST_INT)
3081 size = GEN_INT (INTVAL (size) - used);
3082 else
3083 size = expand_binop (GET_MODE (size), sub_optab, size,
3084 GEN_INT (used), NULL_RTX, 0,
3085 OPTAB_LIB_WIDEN);
3088 /* Get the address of the stack space.
3089 In this case, we do not deal with EXTRA separately.
3090 A single stack adjust will do. */
3091 if (! args_addr)
3093 temp = push_block (size, extra, where_pad == downward);
3094 extra = 0;
3096 else if (GET_CODE (args_so_far) == CONST_INT)
3097 temp = memory_address (BLKmode,
3098 plus_constant (args_addr,
3099 skip + INTVAL (args_so_far)));
3100 else
3101 temp = memory_address (BLKmode,
3102 plus_constant (gen_rtx_PLUS (Pmode,
3103 args_addr,
3104 args_so_far),
3105 skip));
3106 if (current_function_check_memory_usage && ! in_check_memory_usage)
3108 in_check_memory_usage = 1;
3109 target = copy_to_reg (temp);
3110 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3111 emit_library_call (chkr_copy_bitmap_libfunc,
3112 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3113 target, Pmode,
3114 XEXP (xinner, 0), Pmode,
3115 size, TYPE_MODE (sizetype));
3116 else
3117 emit_library_call (chkr_set_right_libfunc,
3118 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3119 target, Pmode,
3120 size, TYPE_MODE (sizetype),
3121 GEN_INT (MEMORY_USE_RW),
3122 TYPE_MODE (integer_type_node));
3123 in_check_memory_usage = 0;
3126 target = gen_rtx_MEM (BLKmode, temp);
3128 if (type != 0)
3130 set_mem_attributes (target, type, 1);
3131 /* Function incoming arguments may overlap with sibling call
3132 outgoing arguments and we cannot allow reordering of reads
3133 from function arguments with stores to outgoing arguments
3134 of sibling calls. */
3135 MEM_ALIAS_SET (target) = 0;
3138 /* TEMP is the address of the block. Copy the data there. */
3139 if (GET_CODE (size) == CONST_INT
3140 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3142 move_by_pieces (target, xinner, INTVAL (size), align);
3143 goto ret;
3145 else
3147 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3148 enum machine_mode mode;
3150 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3151 mode != VOIDmode;
3152 mode = GET_MODE_WIDER_MODE (mode))
3154 enum insn_code code = movstr_optab[(int) mode];
3155 insn_operand_predicate_fn pred;
3157 if (code != CODE_FOR_nothing
3158 && ((GET_CODE (size) == CONST_INT
3159 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3160 <= (GET_MODE_MASK (mode) >> 1)))
3161 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3162 && (!(pred = insn_data[(int) code].operand[0].predicate)
3163 || ((*pred) (target, BLKmode)))
3164 && (!(pred = insn_data[(int) code].operand[1].predicate)
3165 || ((*pred) (xinner, BLKmode)))
3166 && (!(pred = insn_data[(int) code].operand[3].predicate)
3167 || ((*pred) (opalign, VOIDmode))))
3169 rtx op2 = convert_to_mode (mode, size, 1);
3170 rtx last = get_last_insn ();
3171 rtx pat;
3173 pred = insn_data[(int) code].operand[2].predicate;
3174 if (pred != 0 && ! (*pred) (op2, mode))
3175 op2 = copy_to_mode_reg (mode, op2);
3177 pat = GEN_FCN ((int) code) (target, xinner,
3178 op2, opalign);
3179 if (pat)
3181 emit_insn (pat);
3182 goto ret;
3184 else
3185 delete_insns_since (last);
3190 if (!ACCUMULATE_OUTGOING_ARGS)
3192 /* If the source is referenced relative to the stack pointer,
3193 copy it to another register to stabilize it. We do not need
3194 to do this if we know that we won't be changing sp. */
3196 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3197 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3198 temp = copy_to_reg (temp);
3201 /* Make inhibit_defer_pop nonzero around the library call
3202 to force it to pop the bcopy-arguments right away. */
3203 NO_DEFER_POP;
3204 #ifdef TARGET_MEM_FUNCTIONS
3205 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3206 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3207 convert_to_mode (TYPE_MODE (sizetype),
3208 size, TREE_UNSIGNED (sizetype)),
3209 TYPE_MODE (sizetype));
3210 #else
3211 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3212 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3213 convert_to_mode (TYPE_MODE (integer_type_node),
3214 size,
3215 TREE_UNSIGNED (integer_type_node)),
3216 TYPE_MODE (integer_type_node));
3217 #endif
3218 OK_DEFER_POP;
3221 else if (partial > 0)
3223 /* Scalar partly in registers. */
3225 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3226 int i;
3227 int not_stack;
3228 /* # words of start of argument
3229 that we must make space for but need not store. */
3230 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3231 int args_offset = INTVAL (args_so_far);
3232 int skip;
3234 /* Push padding now if padding above and stack grows down,
3235 or if padding below and stack grows up.
3236 But if space already allocated, this has already been done. */
3237 if (extra && args_addr == 0
3238 && where_pad != none && where_pad != stack_direction)
3239 anti_adjust_stack (GEN_INT (extra));
3241 /* If we make space by pushing it, we might as well push
3242 the real data. Otherwise, we can leave OFFSET nonzero
3243 and leave the space uninitialized. */
3244 if (args_addr == 0)
3245 offset = 0;
3247 /* Now NOT_STACK gets the number of words that we don't need to
3248 allocate on the stack. */
3249 not_stack = partial - offset;
3251 /* If the partial register-part of the arg counts in its stack size,
3252 skip the part of stack space corresponding to the registers.
3253 Otherwise, start copying to the beginning of the stack space,
3254 by setting SKIP to 0. */
3255 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3257 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3258 x = validize_mem (force_const_mem (mode, x));
3260 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3261 SUBREGs of such registers are not allowed. */
3262 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3263 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3264 x = copy_to_reg (x);
3266 /* Loop over all the words allocated on the stack for this arg. */
3267 /* We can do it by words, because any scalar bigger than a word
3268 has a size a multiple of a word. */
3269 #ifndef PUSH_ARGS_REVERSED
3270 for (i = not_stack; i < size; i++)
3271 #else
3272 for (i = size - 1; i >= not_stack; i--)
3273 #endif
3274 if (i >= not_stack + offset)
3275 emit_push_insn (operand_subword_force (x, i, mode),
3276 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3277 0, args_addr,
3278 GEN_INT (args_offset + ((i - not_stack + skip)
3279 * UNITS_PER_WORD)),
3280 reg_parm_stack_space, alignment_pad);
3282 else
3284 rtx addr;
3285 rtx target = NULL_RTX;
3286 rtx dest;
3288 /* Push padding now if padding above and stack grows down,
3289 or if padding below and stack grows up.
3290 But if space already allocated, this has already been done. */
3291 if (extra && args_addr == 0
3292 && where_pad != none && where_pad != stack_direction)
3293 anti_adjust_stack (GEN_INT (extra));
3295 #ifdef PUSH_ROUNDING
3296 if (args_addr == 0 && PUSH_ARGS)
3298 addr = gen_push_operand ();
3299 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3301 else
3302 #endif
3304 if (GET_CODE (args_so_far) == CONST_INT)
3305 addr
3306 = memory_address (mode,
3307 plus_constant (args_addr,
3308 INTVAL (args_so_far)));
3309 else
3310 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3311 args_so_far));
3312 target = addr;
3315 dest = gen_rtx_MEM (mode, addr);
3316 if (type != 0)
3318 set_mem_attributes (dest, type, 1);
3319 /* Function incoming arguments may overlap with sibling call
3320 outgoing arguments and we cannot allow reordering of reads
3321 from function arguments with stores to outgoing arguments
3322 of sibling calls. */
3323 MEM_ALIAS_SET (dest) = 0;
3326 emit_move_insn (dest, x);
3328 if (current_function_check_memory_usage && ! in_check_memory_usage)
3330 in_check_memory_usage = 1;
3331 if (target == 0)
3332 target = get_push_address (GET_MODE_SIZE (mode));
3334 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3335 emit_library_call (chkr_copy_bitmap_libfunc,
3336 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3337 Pmode, XEXP (x, 0), Pmode,
3338 GEN_INT (GET_MODE_SIZE (mode)),
3339 TYPE_MODE (sizetype));
3340 else
3341 emit_library_call (chkr_set_right_libfunc,
3342 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3343 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3344 TYPE_MODE (sizetype),
3345 GEN_INT (MEMORY_USE_RW),
3346 TYPE_MODE (integer_type_node));
3347 in_check_memory_usage = 0;
3351 ret:
3352 /* If part should go in registers, copy that part
3353 into the appropriate registers. Do this now, at the end,
3354 since mem-to-mem copies above may do function calls. */
3355 if (partial > 0 && reg != 0)
3357 /* Handle calls that pass values in multiple non-contiguous locations.
3358 The Irix 6 ABI has examples of this. */
3359 if (GET_CODE (reg) == PARALLEL)
3360 emit_group_load (reg, x, -1, align); /* ??? size? */
3361 else
3362 move_block_to_reg (REGNO (reg), x, partial, mode);
3365 if (extra && args_addr == 0 && where_pad == stack_direction)
3366 anti_adjust_stack (GEN_INT (extra));
3368 if (alignment_pad && args_addr == 0)
3369 anti_adjust_stack (alignment_pad);
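/* Illustrative sketch, not part of GCC: the bookkeeping at the top of
   the BLKmode case above, with concrete units.  PARTIAL words go in
   registers; the stack copy starts after the bytes the registers
   cover, rounded down to the parameter boundary.  Assumes 4-byte
   words and an 8-byte PARM_BOUNDARY; names are local to the
   example.  */
static void
example_partial_bookkeeping (int partial, int reg_parm_stack_space,
			     int *used, int *offset, int *skip)
{
  const int units_per_word = 4, parm_boundary_bytes = 8;

  *used = partial * units_per_word;
  *offset = *used % parm_boundary_bytes;
  /* Bytes we need not copy to the stack because registers take care
     of them.  */
  *used -= *offset;
  /* Skip that part of the stack slot only if the register part also
     counts in the argument's stack size.  */
  *skip = reg_parm_stack_space == 0 ? 0 : *used;
}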
3372 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3373 operations. */
3375 static rtx
3376 get_subtarget (x)
3377 rtx x;
3379 return ((x == 0
3380 /* Only registers can be subtargets. */
3381 || GET_CODE (x) != REG
3382 /* If the register is readonly, it can't be set more than once. */
3383 || RTX_UNCHANGING_P (x)
3384 /* Don't use hard regs to avoid extending their life. */
3385 || REGNO (x) < FIRST_PSEUDO_REGISTER
3386 /* Avoid subtargets inside loops,
3387 since they hide some invariant expressions. */
3388 || preserve_subexpressions_p ())
3389 ? 0 : x);
3392 /* Expand an assignment that stores the value of FROM into TO.
3393 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3394 (This may contain a QUEUED rtx;
3395 if the value is constant, this rtx is a constant.)
3396 Otherwise, the returned value is NULL_RTX.
3398 SUGGEST_REG is no longer actually used.
3399 It used to mean, copy the value through a register
3400 and return that register, if that is possible.
3401 We now use WANT_VALUE to decide whether to do this. */
3403 rtx
3404 expand_assignment (to, from, want_value, suggest_reg)
3405 tree to, from;
3406 int want_value;
3407 int suggest_reg ATTRIBUTE_UNUSED;
3409 register rtx to_rtx = 0;
3410 rtx result;
3412 /* Don't crash if the lhs of the assignment was erroneous. */
3414 if (TREE_CODE (to) == ERROR_MARK)
3416 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3417 return want_value ? result : NULL_RTX;
3420 /* Assignment of a structure component needs special treatment
3421 if the structure component's rtx is not simply a MEM.
3422 Assignment of an array element at a constant index, and assignment of
3423 an array element in an unaligned packed structure field, has the same
3424 problem. */
3426 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3427 || TREE_CODE (to) == ARRAY_REF)
3429 enum machine_mode mode1;
3430 HOST_WIDE_INT bitsize, bitpos;
3431 tree offset;
3432 int unsignedp;
3433 int volatilep = 0;
3434 tree tem;
3435 unsigned int alignment;
3437 push_temp_slots ();
3438 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3439 &unsignedp, &volatilep, &alignment);
3441 /* If we are going to use store_bit_field and extract_bit_field,
3442 make sure to_rtx will be safe for multiple use. */
3444 if (mode1 == VOIDmode && want_value)
3445 tem = stabilize_reference (tem);
3447 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3448 if (offset != 0)
3450 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3452 if (GET_CODE (to_rtx) != MEM)
3453 abort ();
3455 if (GET_MODE (offset_rtx) != ptr_mode)
3457 #ifdef POINTERS_EXTEND_UNSIGNED
3458 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3459 #else
3460 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3461 #endif
3464 /* A constant address in TO_RTX can have VOIDmode, we must not try
3465 to call force_reg for that case. Avoid that case. */
3466 if (GET_CODE (to_rtx) == MEM
3467 && GET_MODE (to_rtx) == BLKmode
3468 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3469 && bitsize
3470 && (bitpos % bitsize) == 0
3471 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3472 && alignment == GET_MODE_ALIGNMENT (mode1))
3474 rtx temp = change_address (to_rtx, mode1,
3475 plus_constant (XEXP (to_rtx, 0),
3476 (bitpos /
3477 BITS_PER_UNIT)));
3478 if (GET_CODE (XEXP (temp, 0)) == REG)
3479 to_rtx = temp;
3480 else
3481 to_rtx = change_address (to_rtx, mode1,
3482 force_reg (GET_MODE (XEXP (temp, 0)),
3483 XEXP (temp, 0)));
3484 bitpos = 0;
3487 to_rtx = change_address (to_rtx, VOIDmode,
3488 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3489 force_reg (ptr_mode,
3490 offset_rtx)));
3493 if (volatilep)
3495 if (GET_CODE (to_rtx) == MEM)
3497 /* When the offset is zero, to_rtx is the address of the
3498 structure we are storing into, and hence may be shared.
3499 We must make a new MEM before setting the volatile bit. */
3500 if (offset == 0)
3501 to_rtx = copy_rtx (to_rtx);
3503 MEM_VOLATILE_P (to_rtx) = 1;
3505 #if 0 /* This was turned off because, when a field is volatile
3506 in an object which is not volatile, the object may be in a register,
3507 and then we would abort over here. */
3508 else
3509 abort ();
3510 #endif
3513 if (TREE_CODE (to) == COMPONENT_REF
3514 && TREE_READONLY (TREE_OPERAND (to, 1)))
3516 if (offset == 0)
3517 to_rtx = copy_rtx (to_rtx);
3519 RTX_UNCHANGING_P (to_rtx) = 1;
3522 /* Check the access. */
3523 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3525 rtx to_addr;
3526 int size;
3527 int best_mode_size;
3528 enum machine_mode best_mode;
3530 best_mode = get_best_mode (bitsize, bitpos,
3531 TYPE_ALIGN (TREE_TYPE (tem)),
3532 mode1, volatilep);
3533 if (best_mode == VOIDmode)
3534 best_mode = QImode;
3536 best_mode_size = GET_MODE_BITSIZE (best_mode);
3537 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3538 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3539 size *= GET_MODE_SIZE (best_mode);
3541 /* Check the access right of the pointer. */
3542 in_check_memory_usage = 1;
3543 if (size)
3544 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3545 VOIDmode, 3, to_addr, Pmode,
3546 GEN_INT (size), TYPE_MODE (sizetype),
3547 GEN_INT (MEMORY_USE_WO),
3548 TYPE_MODE (integer_type_node));
3549 in_check_memory_usage = 0;
3552 /* If this is a varying-length object, we must get the address of
3553 the source and do an explicit block move. */
3554 if (bitsize < 0)
3556 unsigned int from_align;
3557 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3558 rtx inner_to_rtx
3559 = change_address (to_rtx, VOIDmode,
3560 plus_constant (XEXP (to_rtx, 0),
3561 bitpos / BITS_PER_UNIT));
3563 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3564 MIN (alignment, from_align));
3565 free_temp_slots ();
3566 pop_temp_slots ();
3567 return to_rtx;
3569 else
3571 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3572 (want_value
3573 /* Spurious cast for HPUX compiler. */
3574 ? ((enum machine_mode)
3575 TYPE_MODE (TREE_TYPE (to)))
3576 : VOIDmode),
3577 unsignedp,
3578 alignment,
3579 int_size_in_bytes (TREE_TYPE (tem)),
3580 get_alias_set (to));
3582 preserve_temp_slots (result);
3583 free_temp_slots ();
3584 pop_temp_slots ();
3586 /* If the value is meaningful, convert RESULT to the proper mode.
3587 Otherwise, return nothing. */
3588 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3589 TYPE_MODE (TREE_TYPE (from)),
3590 result,
3591 TREE_UNSIGNED (TREE_TYPE (to)))
3592 : NULL_RTX);
3596 /* If the rhs is a function call and its value is not an aggregate,
3597 call the function before we start to compute the lhs.
3598 This is needed for correct code for cases such as
3599 val = setjmp (buf) on machines where reference to val
3600 requires loading up part of an address in a separate insn.
3602 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3603 since it might be a promoted variable where the zero- or sign- extension
3604 needs to be done. Handling this in the normal way is safe because no
3605 computation is done before the call. */
3606 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3607 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3608 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3609 && GET_CODE (DECL_RTL (to)) == REG))
3611 rtx value;
3613 push_temp_slots ();
3614 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3615 if (to_rtx == 0)
3616 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3618 /* Handle calls that return values in multiple non-contiguous locations.
3619 The Irix 6 ABI has examples of this. */
3620 if (GET_CODE (to_rtx) == PARALLEL)
3621 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3622 TYPE_ALIGN (TREE_TYPE (from)));
3623 else if (GET_MODE (to_rtx) == BLKmode)
3624 emit_block_move (to_rtx, value, expr_size (from),
3625 TYPE_ALIGN (TREE_TYPE (from)));
3626 else
3628 #ifdef POINTERS_EXTEND_UNSIGNED
3629 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3630 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3631 value = convert_memory_address (GET_MODE (to_rtx), value);
3632 #endif
3633 emit_move_insn (to_rtx, value);
3635 preserve_temp_slots (to_rtx);
3636 free_temp_slots ();
3637 pop_temp_slots ();
3638 return want_value ? to_rtx : NULL_RTX;
3641 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3642 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3644 if (to_rtx == 0)
3646 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3647 if (GET_CODE (to_rtx) == MEM)
3648 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3651 /* Don't move directly into a return register. */
3652 if (TREE_CODE (to) == RESULT_DECL
3653 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3655 rtx temp;
3657 push_temp_slots ();
3658 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3660 if (GET_CODE (to_rtx) == PARALLEL)
3661 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3662 TYPE_ALIGN (TREE_TYPE (from)));
3663 else
3664 emit_move_insn (to_rtx, temp);
3666 preserve_temp_slots (to_rtx);
3667 free_temp_slots ();
3668 pop_temp_slots ();
3669 return want_value ? to_rtx : NULL_RTX;
3672 /* In case we are returning the contents of an object which overlaps
3673 the place the value is being stored, use a safe function when copying
3674 a value through a pointer into a structure value return block. */
3675 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3676 && current_function_returns_struct
3677 && !current_function_returns_pcc_struct)
3679 rtx from_rtx, size;
3681 push_temp_slots ();
3682 size = expr_size (from);
3683 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3684 EXPAND_MEMORY_USE_DONT);
3686 /* Copy the rights of the bitmap. */
3687 if (current_function_check_memory_usage)
3688 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3689 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3690 XEXP (from_rtx, 0), Pmode,
3691 convert_to_mode (TYPE_MODE (sizetype),
3692 size, TREE_UNSIGNED (sizetype)),
3693 TYPE_MODE (sizetype));
3695 #ifdef TARGET_MEM_FUNCTIONS
3696 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3697 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3698 XEXP (from_rtx, 0), Pmode,
3699 convert_to_mode (TYPE_MODE (sizetype),
3700 size, TREE_UNSIGNED (sizetype)),
3701 TYPE_MODE (sizetype));
3702 #else
3703 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3704 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3705 XEXP (to_rtx, 0), Pmode,
3706 convert_to_mode (TYPE_MODE (integer_type_node),
3707 size, TREE_UNSIGNED (integer_type_node)),
3708 TYPE_MODE (integer_type_node));
3709 #endif
3711 preserve_temp_slots (to_rtx);
3712 free_temp_slots ();
3713 pop_temp_slots ();
3714 return want_value ? to_rtx : NULL_RTX;
3717 /* Compute FROM and store the value in the rtx we got. */
3719 push_temp_slots ();
3720 result = store_expr (from, to_rtx, want_value);
3721 preserve_temp_slots (result);
3722 free_temp_slots ();
3723 pop_temp_slots ();
3724 return want_value ? result : NULL_RTX;
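/* Illustrative sketch, not part of GCC: the sort of source-level
   assignment that takes the COMPONENT_REF path above.  Storing into
   s->b is not a simple whole-object move; get_inner_reference
   decomposes the reference and the store goes through store_field.
   The type and function are invented for the example.  */
struct example_flags
{
  unsigned int a : 3;
  unsigned int b : 7;
  unsigned int c : 22;
};

static void
example_bitfield_assignment (struct example_flags *s, unsigned int v)
{
  s->b = v;	/* read-modify-write of the containing word */
}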
3727 /* Generate code for computing expression EXP,
3728 and storing the value into TARGET.
3729 TARGET may contain a QUEUED rtx.
3731 If WANT_VALUE is nonzero, return a copy of the value
3732 not in TARGET, so that we can be sure to use the proper
3733 value in a containing expression even if TARGET has something
3734 else stored in it. If possible, we copy the value through a pseudo
3735 and return that pseudo. Or, if the value is constant, we try to
3736 return the constant. In some cases, we return a pseudo
3737 copied *from* TARGET.
3739 If the mode is BLKmode then we may return TARGET itself.
3740 It turns out that in BLKmode it doesn't cause a problem,
3741 because C has no operators that could combine two different
3742 assignments into the same BLKmode object with different values
3743 with no sequence point. Will other languages need this to
3744 be more thorough?
3746 If WANT_VALUE is 0, we return NULL, to make sure
3747 to catch quickly any cases where the caller uses the value
3748 and fails to set WANT_VALUE. */
3750 rtx
3751 store_expr (exp, target, want_value)
3752 register tree exp;
3753 register rtx target;
3754 int want_value;
3756 register rtx temp;
3757 int dont_return_target = 0;
3759 if (TREE_CODE (exp) == COMPOUND_EXPR)
3761 /* Perform first part of compound expression, then assign from second
3762 part. */
3763 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3764 emit_queue ();
3765 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3767 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3769 /* For conditional expression, get safe form of the target. Then
3770 test the condition, doing the appropriate assignment on either
3771 side. This avoids the creation of unnecessary temporaries.
3772 For non-BLKmode, it is more efficient not to do this. */
3774 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3776 emit_queue ();
3777 target = protect_from_queue (target, 1);
3779 do_pending_stack_adjust ();
3780 NO_DEFER_POP;
3781 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3782 start_cleanup_deferral ();
3783 store_expr (TREE_OPERAND (exp, 1), target, 0);
3784 end_cleanup_deferral ();
3785 emit_queue ();
3786 emit_jump_insn (gen_jump (lab2));
3787 emit_barrier ();
3788 emit_label (lab1);
3789 start_cleanup_deferral ();
3790 store_expr (TREE_OPERAND (exp, 2), target, 0);
3791 end_cleanup_deferral ();
3792 emit_queue ();
3793 emit_label (lab2);
3794 OK_DEFER_POP;
3796 return want_value ? target : NULL_RTX;
3798 else if (queued_subexp_p (target))
3799 /* If target contains a postincrement, let's not risk
3800 using it as the place to generate the rhs. */
3802 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3804 /* Expand EXP into a new pseudo. */
3805 temp = gen_reg_rtx (GET_MODE (target));
3806 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3808 else
3809 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3811 /* If target is volatile, ANSI requires accessing the value
3812 *from* the target, if it is accessed. So make that happen.
3813 In no case return the target itself. */
3814 if (! MEM_VOLATILE_P (target) && want_value)
3815 dont_return_target = 1;
3817 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3818 && GET_MODE (target) != BLKmode)
3819 /* If target is in memory and caller wants value in a register instead,
3820 arrange that. Pass TARGET as target for expand_expr so that,
3821 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3822 We know expand_expr will not use the target in that case.
3823 Don't do this if TARGET is volatile because we are supposed
3824 to write it and then read it. */
3826 temp = expand_expr (exp, target, GET_MODE (target), 0);
3827 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3828 temp = copy_to_reg (temp);
3829 dont_return_target = 1;
3831 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3832 /* If this is a scalar in a register that is stored in a wider mode
3833 than the declared mode, compute the result into its declared mode
3834 and then convert to the wider mode. Our value is the computed
3835 expression. */
3837 /* If we don't want a value, we can do the conversion inside EXP,
3838 which will often result in some optimizations. Do the conversion
3839 in two steps: first change the signedness, if needed, then
3840 the extend. But don't do this if the type of EXP is a subtype
3841 of something else since then the conversion might involve
3842 more than just converting modes. */
3843 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3844 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3846 if (TREE_UNSIGNED (TREE_TYPE (exp))
3847 != SUBREG_PROMOTED_UNSIGNED_P (target))
3849 = convert
3850 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3851 TREE_TYPE (exp)),
3852 exp);
3854 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3855 SUBREG_PROMOTED_UNSIGNED_P (target)),
3856 exp);
3859 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3861 /* If TEMP is a volatile MEM and we want a result value, make
3862 the access now so it gets done only once. Likewise if
3863 it contains TARGET. */
3864 if (GET_CODE (temp) == MEM && want_value
3865 && (MEM_VOLATILE_P (temp)
3866 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3867 temp = copy_to_reg (temp);
3869 /* If TEMP is a VOIDmode constant, use convert_modes to make
3870 sure that we properly convert it. */
3871 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3872 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3873 TYPE_MODE (TREE_TYPE (exp)), temp,
3874 SUBREG_PROMOTED_UNSIGNED_P (target));
3876 convert_move (SUBREG_REG (target), temp,
3877 SUBREG_PROMOTED_UNSIGNED_P (target));
3879 /* If we promoted a constant, change the mode back down to match
3880 target. Otherwise, the caller might get confused by a result whose
3881 mode is larger than expected. */
3883 if (want_value && GET_MODE (temp) != GET_MODE (target)
3884 && GET_MODE (temp) != VOIDmode)
3886 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3887 SUBREG_PROMOTED_VAR_P (temp) = 1;
3888 SUBREG_PROMOTED_UNSIGNED_P (temp)
3889 = SUBREG_PROMOTED_UNSIGNED_P (target);
3892 return want_value ? temp : NULL_RTX;
3894 else
3896 temp = expand_expr (exp, target, GET_MODE (target), 0);
3897 /* Return TARGET if it's a specified hardware register.
3898 If TARGET is a volatile mem ref, either return TARGET
3899 or return a reg copied *from* TARGET; ANSI requires this.
3901 Otherwise, if TEMP is not TARGET, return TEMP
3902 if it is constant (for efficiency),
3903 or if we really want the correct value. */
3904 if (!(target && GET_CODE (target) == REG
3905 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3906 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3907 && ! rtx_equal_p (temp, target)
3908 && (CONSTANT_P (temp) || want_value))
3909 dont_return_target = 1;
3912 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3913 the same as that of TARGET, adjust the constant. This is needed, for
3914 example, in case it is a CONST_DOUBLE and we want only a word-sized
3915 value. */
3916 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3917 && TREE_CODE (exp) != ERROR_MARK
3918 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3919 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3920 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3922 if (current_function_check_memory_usage
3923 && GET_CODE (target) == MEM
3924 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3926 in_check_memory_usage = 1;
3927 if (GET_CODE (temp) == MEM)
3928 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3929 VOIDmode, 3, XEXP (target, 0), Pmode,
3930 XEXP (temp, 0), Pmode,
3931 expr_size (exp), TYPE_MODE (sizetype));
3932 else
3933 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3934 VOIDmode, 3, XEXP (target, 0), Pmode,
3935 expr_size (exp), TYPE_MODE (sizetype),
3936 GEN_INT (MEMORY_USE_WO),
3937 TYPE_MODE (integer_type_node));
3938 in_check_memory_usage = 0;
3941 /* If value was not generated in the target, store it there.
3942 Convert the value to TARGET's type first if necessary. */
3943 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3944 one or both of them are volatile memory refs, we have to distinguish
3945 two cases:
3946 - expand_expr has used TARGET. In this case, we must not generate
3947 another copy. This can be detected by TARGET being equal according
3948 to == .
3949 - expand_expr has not used TARGET - that means that the source just
3950 happens to have the same RTX form. Since temp will have been created
3951 by expand_expr, it will compare unequal according to == .
3952 We must generate a copy in this case, to reach the correct number
3953 of volatile memory references. */
3955 if ((! rtx_equal_p (temp, target)
3956 || (temp != target && (side_effects_p (temp)
3957 || side_effects_p (target))))
3958 && TREE_CODE (exp) != ERROR_MARK)
3960 target = protect_from_queue (target, 1);
3961 if (GET_MODE (temp) != GET_MODE (target)
3962 && GET_MODE (temp) != VOIDmode)
3964 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3965 if (dont_return_target)
3967 /* In this case, we will return TEMP,
3968 so make sure it has the proper mode.
3969 But don't forget to store the value into TARGET. */
3970 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3971 emit_move_insn (target, temp);
3973 else
3974 convert_move (target, temp, unsignedp);
3977 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3979 /* Handle copying a string constant into an array.
3980 The string constant may be shorter than the array.
3981 So copy just the string's actual length, and clear the rest. */
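/* Illustrative example (added comment): for `char buf[8] = "hi";' the
target is 8 bytes but the STRING_CST supplies only 3 ("hi" plus the
terminating null), so the block move below copies the string bytes and
clear_storage zeroes the remainder of BUF. */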
3982 rtx size;
3983 rtx addr;
3985 /* Get the size of the data type of the string,
3986 which is actually the size of the target. */
3987 size = expr_size (exp);
3988 if (GET_CODE (size) == CONST_INT
3989 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3990 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3991 else
3993 /* Compute the size of the data to copy from the string. */
3994 tree copy_size
3995 = size_binop (MIN_EXPR,
3996 make_tree (sizetype, size),
3997 size_int (TREE_STRING_LENGTH (exp)));
3998 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
3999 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4000 VOIDmode, 0);
4001 rtx label = 0;
4003 /* Copy that much. */
4004 emit_block_move (target, temp, copy_size_rtx,
4005 TYPE_ALIGN (TREE_TYPE (exp)));
4007 /* Figure out how much is left in TARGET that we have to clear.
4008 Do all calculations in ptr_mode. */
4010 addr = XEXP (target, 0);
4011 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4013 if (GET_CODE (copy_size_rtx) == CONST_INT)
4015 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4016 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4017 align = MIN (align, (BITS_PER_UNIT
4018 * (INTVAL (copy_size_rtx)
4019 & - INTVAL (copy_size_rtx))));
4021 else
4023 addr = force_reg (ptr_mode, addr);
4024 addr = expand_binop (ptr_mode, add_optab, addr,
4025 copy_size_rtx, NULL_RTX, 0,
4026 OPTAB_LIB_WIDEN);
4028 size = expand_binop (ptr_mode, sub_optab, size,
4029 copy_size_rtx, NULL_RTX, 0,
4030 OPTAB_LIB_WIDEN);
4032 align = BITS_PER_UNIT;
4033 label = gen_label_rtx ();
4034 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4035 GET_MODE (size), 0, 0, label);
4037 align = MIN (align, expr_align (copy_size));
4039 if (size != const0_rtx)
4041 rtx dest = gen_rtx_MEM (BLKmode, addr);
4043 MEM_COPY_ATTRIBUTES (dest, target);
4045 /* Be sure we can write on ADDR. */
4046 in_check_memory_usage = 1;
4047 if (current_function_check_memory_usage)
4048 emit_library_call (chkr_check_addr_libfunc,
4049 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4050 addr, Pmode,
4051 size, TYPE_MODE (sizetype),
4052 GEN_INT (MEMORY_USE_WO),
4053 TYPE_MODE (integer_type_node));
4054 in_check_memory_usage = 0;
4055 clear_storage (dest, size, align);
4058 if (label)
4059 emit_label (label);
4062 /* Handle calls that return values in multiple non-contiguous locations.
4063 The Irix 6 ABI has examples of this. */
4064 else if (GET_CODE (target) == PARALLEL)
4065 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4066 TYPE_ALIGN (TREE_TYPE (exp)));
4067 else if (GET_MODE (temp) == BLKmode)
4068 emit_block_move (target, temp, expr_size (exp),
4069 TYPE_ALIGN (TREE_TYPE (exp)));
4070 else
4071 emit_move_insn (target, temp);
4074 /* If we don't want a value, return NULL_RTX. */
4075 if (! want_value)
4076 return NULL_RTX;
4078 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4079 ??? The latter test doesn't seem to make sense. */
4080 else if (dont_return_target && GET_CODE (temp) != MEM)
4081 return temp;
4083 /* Return TARGET itself if it is a hard register. */
4084 else if (want_value && GET_MODE (target) != BLKmode
4085 && ! (GET_CODE (target) == REG
4086 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4087 return copy_to_reg (target);
4089 else
4090 return target;
4093 /* Return 1 if EXP just contains zeros. */
4095 static int
4096 is_zeros_p (exp)
4097 tree exp;
4099 tree elt;
4101 switch (TREE_CODE (exp))
4103 case CONVERT_EXPR:
4104 case NOP_EXPR:
4105 case NON_LVALUE_EXPR:
4106 return is_zeros_p (TREE_OPERAND (exp, 0));
4108 case INTEGER_CST:
4109 return integer_zerop (exp);
4111 case COMPLEX_CST:
4112 return
4113 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4115 case REAL_CST:
4116 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4118 case CONSTRUCTOR:
4119 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4120 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4121 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4122 if (! is_zeros_p (TREE_VALUE (elt)))
4123 return 0;
4125 return 1;
4127 default:
4128 return 0;
4132 /* Return 1 if EXP contains mostly (3/4) zeros. */
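/* Illustrative example (added comment): for the initializer
{ 0, 0, 0, 0, 0, 0, 1, 0 } we count 7 zero elements out of 8, and
4 * 7 >= 3 * 8 holds, so the constructor is treated as mostly zero;
callers then clear the whole object first and store only the nonzero
elements. */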
4134 static int
4135 mostly_zeros_p (exp)
4136 tree exp;
4138 if (TREE_CODE (exp) == CONSTRUCTOR)
4140 int elts = 0, zeros = 0;
4141 tree elt = CONSTRUCTOR_ELTS (exp);
4142 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4144 /* If there are no ranges of true bits, it is all zero. */
4145 return elt == NULL_TREE;
4147 for (; elt; elt = TREE_CHAIN (elt))
4149 /* We do not handle the case where the index is a RANGE_EXPR,
4150 so the statistic will be somewhat inaccurate.
4151 We do make a more accurate count in store_constructor itself,
4152 and since this function is only used for nested array elements,
4153 this should be close enough. */
4154 if (mostly_zeros_p (TREE_VALUE (elt)))
4155 zeros++;
4156 elts++;
4159 return 4 * zeros >= 3 * elts;
4162 return is_zeros_p (exp);
4165 /* Helper function for store_constructor.
4166 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4167 TYPE is the type of the CONSTRUCTOR, not the element type.
4168 ALIGN and CLEARED are as for store_constructor.
4169 ALIAS_SET is the alias set to use for any stores.
4171 This provides a recursive shortcut back to store_constructor when it isn't
4172 necessary to go through store_field. This is so that we can pass through
4173 the cleared field to let store_constructor know that we may not have to
4174 clear a substructure if the outer structure has already been cleared. */
4176 static void
4177 store_constructor_field (target, bitsize, bitpos,
4178 mode, exp, type, align, cleared, alias_set)
4179 rtx target;
4180 unsigned HOST_WIDE_INT bitsize;
4181 HOST_WIDE_INT bitpos;
4182 enum machine_mode mode;
4183 tree exp, type;
4184 unsigned int align;
4185 int cleared;
4186 int alias_set;
4188 if (TREE_CODE (exp) == CONSTRUCTOR
4189 && bitpos % BITS_PER_UNIT == 0
4190 /* If we have a non-zero bitpos for a register target, then we just
4191 let store_field do the bitfield handling. This is unlikely to
4192 generate unnecessary clear instructions anyway. */
4193 && (bitpos == 0 || GET_CODE (target) == MEM))
4195 if (bitpos != 0)
4196 target
4197 = change_address (target,
4198 GET_MODE (target) == BLKmode
4199 || 0 != (bitpos
4200 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4201 ? BLKmode : VOIDmode,
4202 plus_constant (XEXP (target, 0),
4203 bitpos / BITS_PER_UNIT));
4205 if (GET_CODE (target) == MEM)
4206 MEM_ALIAS_SET (target) = alias_set;
4207 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4209 else
4210 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4211 int_size_in_bytes (type), alias_set);
4214 /* Store the value of constructor EXP into the rtx TARGET.
4215 TARGET is either a REG or a MEM.
4216 ALIGN is the maximum known alignment for TARGET.
4217 CLEARED is true if TARGET is known to have been zero'd.
4218 SIZE is the number of bytes of TARGET we are allowed to modify: this
4219 may not be the same as the size of EXP if we are assigning to a field
4220 which has been packed to exclude padding bits. */
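/* Illustrative example (added comment): for `struct { int a, b, c; } s
= { 1 };' the constructor names fewer fields than the structure has, so
the code below clears the whole of S first and then stores only field A;
the missing fields are simply skipped. */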
4222 static void
4223 store_constructor (exp, target, align, cleared, size)
4224 tree exp;
4225 rtx target;
4226 unsigned int align;
4227 int cleared;
4228 HOST_WIDE_INT size;
4230 tree type = TREE_TYPE (exp);
4231 #ifdef WORD_REGISTER_OPERATIONS
4232 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4233 #endif
4235 /* We know our target cannot conflict, since safe_from_p has been called. */
4236 #if 0
4237 /* Don't try copying piece by piece into a hard register
4238 since that is vulnerable to being clobbered by EXP.
4239 Instead, construct in a pseudo register and then copy it all. */
4240 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4242 rtx temp = gen_reg_rtx (GET_MODE (target));
4243 store_constructor (exp, temp, align, cleared, size);
4244 emit_move_insn (target, temp);
4245 return;
4247 #endif
4249 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4250 || TREE_CODE (type) == QUAL_UNION_TYPE)
4252 register tree elt;
4254 /* Inform later passes that the whole union value is dead. */
4255 if ((TREE_CODE (type) == UNION_TYPE
4256 || TREE_CODE (type) == QUAL_UNION_TYPE)
4257 && ! cleared)
4259 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4261 /* If the constructor is empty, clear the union. */
4262 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4263 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4266 /* If we are building a static constructor into a register,
4267 set the initial value as zero so we can fold the value into
4268 a constant. But if more than one register is involved,
4269 this probably loses. */
4270 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4271 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4273 if (! cleared)
4274 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4276 cleared = 1;
4279 /* If the constructor has fewer fields than the structure
4280 or if we are initializing the structure to mostly zeros,
4281 clear the whole structure first. Don't do this if TARGET is a
4282 register whose mode size isn't equal to SIZE, since clear_storage
4283 can't handle this case. */
4284 else if (size > 0
4285 && ((list_length (CONSTRUCTOR_ELTS (exp))
4286 != fields_length (type))
4287 || mostly_zeros_p (exp))
4288 && (GET_CODE (target) != REG
4289 || GET_MODE_SIZE (GET_MODE (target)) == size))
4291 if (! cleared)
4292 clear_storage (target, GEN_INT (size), align);
4294 cleared = 1;
4296 else if (! cleared)
4297 /* Inform later passes that the old value is dead. */
4298 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4300 /* Store each element of the constructor into
4301 the corresponding field of TARGET. */
4303 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4305 register tree field = TREE_PURPOSE (elt);
4306 #ifdef WORD_REGISTER_OPERATIONS
4307 tree value = TREE_VALUE (elt);
4308 #endif
4309 register enum machine_mode mode;
4310 HOST_WIDE_INT bitsize;
4311 HOST_WIDE_INT bitpos = 0;
4312 int unsignedp;
4313 tree offset;
4314 rtx to_rtx = target;
4316 /* Just ignore missing fields.
4317 We cleared the whole structure, above,
4318 if any fields are missing. */
4319 if (field == 0)
4320 continue;
4322 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4323 continue;
4325 if (host_integerp (DECL_SIZE (field), 1))
4326 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4327 else
4328 bitsize = -1;
4330 unsignedp = TREE_UNSIGNED (field);
4331 mode = DECL_MODE (field);
4332 if (DECL_BIT_FIELD (field))
4333 mode = VOIDmode;
4335 offset = DECL_FIELD_OFFSET (field);
4336 if (host_integerp (offset, 0)
4337 && host_integerp (bit_position (field), 0))
4339 bitpos = int_bit_position (field);
4340 offset = 0;
4342 else
4343 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4345 if (offset)
4347 rtx offset_rtx;
4349 if (contains_placeholder_p (offset))
4350 offset = build (WITH_RECORD_EXPR, sizetype,
4351 offset, make_tree (TREE_TYPE (exp), target));
4353 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4354 if (GET_CODE (to_rtx) != MEM)
4355 abort ();
4357 if (GET_MODE (offset_rtx) != ptr_mode)
4359 #ifdef POINTERS_EXTEND_UNSIGNED
4360 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4361 #else
4362 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4363 #endif
4366 to_rtx
4367 = change_address (to_rtx, VOIDmode,
4368 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4369 force_reg (ptr_mode,
4370 offset_rtx)));
4371 align = DECL_OFFSET_ALIGN (field);
4374 if (TREE_READONLY (field))
4376 if (GET_CODE (to_rtx) == MEM)
4377 to_rtx = copy_rtx (to_rtx);
4379 RTX_UNCHANGING_P (to_rtx) = 1;
4382 #ifdef WORD_REGISTER_OPERATIONS
4383 /* If this initializes a field that is smaller than a word, at the
4384 start of a word, try to widen it to a full word.
4385 This special case allows us to output C++ member function
4386 initializations in a form that the optimizers can understand. */
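/* Illustrative note (added comment): e.g. a constant `short' member at
bit position 0 of a register-held structure is converted to a word-sized
constant (shifted up on big-endian targets) and stored as a full word
below. */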
4387 if (GET_CODE (target) == REG
4388 && bitsize < BITS_PER_WORD
4389 && bitpos % BITS_PER_WORD == 0
4390 && GET_MODE_CLASS (mode) == MODE_INT
4391 && TREE_CODE (value) == INTEGER_CST
4392 && exp_size >= 0
4393 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4395 tree type = TREE_TYPE (value);
4396 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4398 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4399 value = convert (type, value);
4401 if (BYTES_BIG_ENDIAN)
4402 value
4403 = fold (build (LSHIFT_EXPR, type, value,
4404 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4405 bitsize = BITS_PER_WORD;
4406 mode = word_mode;
4408 #endif
4409 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4410 TREE_VALUE (elt), type, align, cleared,
4411 (DECL_NONADDRESSABLE_P (field)
4412 && GET_CODE (to_rtx) == MEM)
4413 ? MEM_ALIAS_SET (to_rtx)
4414 : get_alias_set (TREE_TYPE (field)));
4417 else if (TREE_CODE (type) == ARRAY_TYPE)
4419 register tree elt;
4420 register int i;
4421 int need_to_clear;
4422 tree domain = TYPE_DOMAIN (type);
4423 tree elttype = TREE_TYPE (type);
4424 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4425 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4426 HOST_WIDE_INT minelt;
4427 HOST_WIDE_INT maxelt;
4429 /* If we have constant bounds for the range of the type, get them. */
4430 if (const_bounds_p)
4432 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4433 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4436 /* If the constructor has fewer elements than the array,
4437 clear the whole array first. Similarly if this is
4438 a static constructor of a non-BLKmode object. */
4439 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4440 need_to_clear = 1;
4441 else
4443 HOST_WIDE_INT count = 0, zero_count = 0;
4444 need_to_clear = ! const_bounds_p;
4446 /* This loop is a more accurate version of the loop in
4447 mostly_zeros_p (it handles RANGE_EXPR in an index).
4448 It is also needed to check for missing elements. */
4449 for (elt = CONSTRUCTOR_ELTS (exp);
4450 elt != NULL_TREE && ! need_to_clear;
4451 elt = TREE_CHAIN (elt))
4453 tree index = TREE_PURPOSE (elt);
4454 HOST_WIDE_INT this_node_count;
4456 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4458 tree lo_index = TREE_OPERAND (index, 0);
4459 tree hi_index = TREE_OPERAND (index, 1);
4461 if (! host_integerp (lo_index, 1)
4462 || ! host_integerp (hi_index, 1))
4464 need_to_clear = 1;
4465 break;
4468 this_node_count = (tree_low_cst (hi_index, 1)
4469 - tree_low_cst (lo_index, 1) + 1);
4471 else
4472 this_node_count = 1;
4474 count += this_node_count;
4475 if (mostly_zeros_p (TREE_VALUE (elt)))
4476 zero_count += this_node_count;
4479 /* Clear the entire array first if there are any missing elements,
4480 or if the incidence of zero elements is >= 75%. */
4481 if (! need_to_clear
4482 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4483 need_to_clear = 1;
4486 if (need_to_clear && size > 0)
4488 if (! cleared)
4489 clear_storage (target, GEN_INT (size), align);
4490 cleared = 1;
4492 else
4493 /* Inform later passes that the old value is dead. */
4494 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4496 /* Store each element of the constructor into
4497 the corresponding element of TARGET, determined
4498 by counting the elements. */
4499 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4500 elt;
4501 elt = TREE_CHAIN (elt), i++)
4503 register enum machine_mode mode;
4504 HOST_WIDE_INT bitsize;
4505 HOST_WIDE_INT bitpos;
4506 int unsignedp;
4507 tree value = TREE_VALUE (elt);
4508 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4509 tree index = TREE_PURPOSE (elt);
4510 rtx xtarget = target;
4512 if (cleared && is_zeros_p (value))
4513 continue;
4515 unsignedp = TREE_UNSIGNED (elttype);
4516 mode = TYPE_MODE (elttype);
4517 if (mode == BLKmode)
4518 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4519 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4520 : -1);
4521 else
4522 bitsize = GET_MODE_BITSIZE (mode);
4524 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4526 tree lo_index = TREE_OPERAND (index, 0);
4527 tree hi_index = TREE_OPERAND (index, 1);
4528 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4529 struct nesting *loop;
4530 HOST_WIDE_INT lo, hi, count;
4531 tree position;
4533 /* If the range is constant and "small", unroll the loop. */
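/* Illustrative example (added comment): a GNU C range designator such as
`int a[16] = { [2 ... 5] = 7 };' reaches here as a RANGE_EXPR index; a
small constant range is unrolled into individual element stores, while
larger or non-constant ranges fall through to the loop emitted in the
else branch. */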
4534 if (const_bounds_p
4535 && host_integerp (lo_index, 0)
4536 && host_integerp (hi_index, 0)
4537 && (lo = tree_low_cst (lo_index, 0),
4538 hi = tree_low_cst (hi_index, 0),
4539 count = hi - lo + 1,
4540 (GET_CODE (target) != MEM
4541 || count <= 2
4542 || (host_integerp (TYPE_SIZE (elttype), 1)
4543 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4544 <= 40 * 8)))))
4546 lo -= minelt; hi -= minelt;
4547 for (; lo <= hi; lo++)
4549 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4550 store_constructor_field
4551 (target, bitsize, bitpos, mode, value, type, align,
4552 cleared,
4553 TYPE_NONALIASED_COMPONENT (type)
4554 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4557 else
4559 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4560 loop_top = gen_label_rtx ();
4561 loop_end = gen_label_rtx ();
4563 unsignedp = TREE_UNSIGNED (domain);
4565 index = build_decl (VAR_DECL, NULL_TREE, domain);
4567 DECL_RTL (index) = index_r
4568 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4569 &unsignedp, 0));
4571 if (TREE_CODE (value) == SAVE_EXPR
4572 && SAVE_EXPR_RTL (value) == 0)
4574 /* Make sure value gets expanded once before the
4575 loop. */
4576 expand_expr (value, const0_rtx, VOIDmode, 0);
4577 emit_queue ();
4579 store_expr (lo_index, index_r, 0);
4580 loop = expand_start_loop (0);
4582 /* Assign value to element index. */
4583 position
4584 = convert (ssizetype,
4585 fold (build (MINUS_EXPR, TREE_TYPE (index),
4586 index, TYPE_MIN_VALUE (domain))));
4587 position = size_binop (MULT_EXPR, position,
4588 convert (ssizetype,
4589 TYPE_SIZE_UNIT (elttype)));
4591 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4592 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4593 xtarget = change_address (target, mode, addr);
4594 if (TREE_CODE (value) == CONSTRUCTOR)
4595 store_constructor (value, xtarget, align, cleared,
4596 bitsize / BITS_PER_UNIT);
4597 else
4598 store_expr (value, xtarget, 0);
4600 expand_exit_loop_if_false (loop,
4601 build (LT_EXPR, integer_type_node,
4602 index, hi_index));
4604 expand_increment (build (PREINCREMENT_EXPR,
4605 TREE_TYPE (index),
4606 index, integer_one_node), 0, 0);
4607 expand_end_loop ();
4608 emit_label (loop_end);
4611 else if ((index != 0 && ! host_integerp (index, 0))
4612 || ! host_integerp (TYPE_SIZE (elttype), 1))
4614 rtx pos_rtx, addr;
4615 tree position;
4617 if (index == 0)
4618 index = ssize_int (i);
4620 if (minelt)
4621 index = convert (ssizetype,
4622 fold (build (MINUS_EXPR, index,
4623 TYPE_MIN_VALUE (domain))));
4625 position = size_binop (MULT_EXPR, index,
4626 convert (ssizetype,
4627 TYPE_SIZE_UNIT (elttype)));
4628 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4629 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4630 xtarget = change_address (target, mode, addr);
4631 store_expr (value, xtarget, 0);
4633 else
4635 if (index != 0)
4636 bitpos = ((tree_low_cst (index, 0) - minelt)
4637 * tree_low_cst (TYPE_SIZE (elttype), 1));
4638 else
4639 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4641 store_constructor_field (target, bitsize, bitpos, mode, value,
4642 type, align, cleared,
4643 TYPE_NONALIASED_COMPONENT (type)
4644 ? MEM_ALIAS_SET (target) :
4645 get_alias_set (elttype));
4651 /* Set constructor assignments. */
4652 else if (TREE_CODE (type) == SET_TYPE)
4654 tree elt = CONSTRUCTOR_ELTS (exp);
4655 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4656 tree domain = TYPE_DOMAIN (type);
4657 tree domain_min, domain_max, bitlength;
4659 /* The default implementation strategy is to extract the constant
4660 parts of the constructor, use that to initialize the target,
4661 and then "or" in whatever non-constant ranges we need in addition.
4663 If a large set is all zero or all ones, it is
4664 probably better to set it using memset (if available) or bzero.
4665 Also, if a large set has just a single range, it may also be
4666 better to first clear the whole set (using
4667 bzero/memset), and then set the bits we want. */
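/* Illustrative note (added comment): SET_TYPE constructors come from
front ends with set types (e.g. Pascal or CHILL), not from C. The
constant members are packed into host words and stored directly;
byte-aligned constant ranges may use memset, and anything else goes
through the `__setbits' library routine called below. */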
4669 /* Check for all zeros. */
4670 if (elt == NULL_TREE && size > 0)
4672 if (!cleared)
4673 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4674 return;
4677 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4678 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4679 bitlength = size_binop (PLUS_EXPR,
4680 size_diffop (domain_max, domain_min),
4681 ssize_int (1));
4683 nbits = tree_low_cst (bitlength, 1);
4685 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4686 are "complicated" (more than one range), initialize (the
4687 constant parts) by copying from a constant. */
4688 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4689 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4691 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4692 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4693 char *bit_buffer = (char *) alloca (nbits);
4694 HOST_WIDE_INT word = 0;
4695 unsigned int bit_pos = 0;
4696 unsigned int ibit = 0;
4697 unsigned int offset = 0; /* In bytes from beginning of set. */
4699 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4700 for (;;)
4702 if (bit_buffer[ibit])
4704 if (BYTES_BIG_ENDIAN)
4705 word |= (1 << (set_word_size - 1 - bit_pos));
4706 else
4707 word |= 1 << bit_pos;
4710 bit_pos++; ibit++;
4711 if (bit_pos >= set_word_size || ibit == nbits)
4713 if (word != 0 || ! cleared)
4715 rtx datum = GEN_INT (word);
4716 rtx to_rtx;
4718 /* The assumption here is that it is safe to use
4719 XEXP if the set is multi-word, but not if
4720 it's single-word. */
4721 if (GET_CODE (target) == MEM)
4723 to_rtx = plus_constant (XEXP (target, 0), offset);
4724 to_rtx = change_address (target, mode, to_rtx);
4726 else if (offset == 0)
4727 to_rtx = target;
4728 else
4729 abort ();
4730 emit_move_insn (to_rtx, datum);
4733 if (ibit == nbits)
4734 break;
4735 word = 0;
4736 bit_pos = 0;
4737 offset += set_word_size / BITS_PER_UNIT;
4741 else if (!cleared)
4742 /* Don't bother clearing storage if the set is all ones. */
4743 if (TREE_CHAIN (elt) != NULL_TREE
4744 || (TREE_PURPOSE (elt) == NULL_TREE
4745 ? nbits != 1
4746 : ( ! host_integerp (TREE_VALUE (elt), 0)
4747 || ! host_integerp (TREE_PURPOSE (elt), 0)
4748 || (tree_low_cst (TREE_VALUE (elt), 0)
4749 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4750 != (HOST_WIDE_INT) nbits))))
4751 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4753 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4755 /* Start of range of element or NULL. */
4756 tree startbit = TREE_PURPOSE (elt);
4757 /* End of range of element, or element value. */
4758 tree endbit = TREE_VALUE (elt);
4759 #ifdef TARGET_MEM_FUNCTIONS
4760 HOST_WIDE_INT startb, endb;
4761 #endif
4762 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4764 bitlength_rtx = expand_expr (bitlength,
4765 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4767 /* Handle non-range tuple element like [ expr ]. */
4768 if (startbit == NULL_TREE)
4770 startbit = save_expr (endbit);
4771 endbit = startbit;
4774 startbit = convert (sizetype, startbit);
4775 endbit = convert (sizetype, endbit);
4776 if (! integer_zerop (domain_min))
4778 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4779 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4781 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4782 EXPAND_CONST_ADDRESS);
4783 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4784 EXPAND_CONST_ADDRESS);
4786 if (REG_P (target))
4788 targetx = assign_stack_temp (GET_MODE (target),
4789 GET_MODE_SIZE (GET_MODE (target)),
4791 emit_move_insn (targetx, target);
4794 else if (GET_CODE (target) == MEM)
4795 targetx = target;
4796 else
4797 abort ();
4799 #ifdef TARGET_MEM_FUNCTIONS
4800 /* Optimization: If startbit and endbit are
4801 constants divisible by BITS_PER_UNIT,
4802 call memset instead. */
4803 if (TREE_CODE (startbit) == INTEGER_CST
4804 && TREE_CODE (endbit) == INTEGER_CST
4805 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4806 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4808 emit_library_call (memset_libfunc, LCT_NORMAL,
4809 VOIDmode, 3,
4810 plus_constant (XEXP (targetx, 0),
4811 startb / BITS_PER_UNIT),
4812 Pmode,
4813 constm1_rtx, TYPE_MODE (integer_type_node),
4814 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4815 TYPE_MODE (sizetype));
4817 else
4818 #endif
4819 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4820 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4821 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4822 startbit_rtx, TYPE_MODE (sizetype),
4823 endbit_rtx, TYPE_MODE (sizetype));
4825 if (REG_P (target))
4826 emit_move_insn (target, targetx);
4830 else
4831 abort ();
4834 /* Store the value of EXP (an expression tree)
4835 into a subfield of TARGET which has mode MODE and occupies
4836 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4837 If MODE is VOIDmode, it means that we are storing into a bit-field.
4839 If VALUE_MODE is VOIDmode, return nothing in particular.
4840 UNSIGNEDP is not used in this case.
4842 Otherwise, return an rtx for the value stored. This rtx
4843 has mode VALUE_MODE if that is convenient to do.
4844 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4846 ALIGN is the alignment that TARGET is known to have.
4847 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4849 ALIAS_SET is the alias set for the destination. This value will
4850 (in general) be different from that for TARGET, since TARGET is a
4851 reference to the containing structure. */
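/* Illustrative example (added comment): an assignment to a C bit-field
such as `s.x = v;' with `struct { unsigned x : 3; } s;' arrives here
with MODE == VOIDmode and is handled by store_bit_field; a normally
aligned, byte-addressable field is instead stored through an ordinary
memory reference built near the end of this function. */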
4853 static rtx
4854 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4855 unsignedp, align, total_size, alias_set)
4856 rtx target;
4857 HOST_WIDE_INT bitsize;
4858 HOST_WIDE_INT bitpos;
4859 enum machine_mode mode;
4860 tree exp;
4861 enum machine_mode value_mode;
4862 int unsignedp;
4863 unsigned int align;
4864 HOST_WIDE_INT total_size;
4865 int alias_set;
4867 HOST_WIDE_INT width_mask = 0;
4869 if (TREE_CODE (exp) == ERROR_MARK)
4870 return const0_rtx;
4872 if (bitsize < HOST_BITS_PER_WIDE_INT)
4873 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4875 /* If we are storing into an unaligned field of an aligned union that is
4876 in a register, we may have the mode of TARGET being an integer mode but
4877 MODE == BLKmode. In that case, get an aligned object whose size and
4878 alignment are the same as TARGET and store TARGET into it (we can avoid
4879 the store if the field being stored is the entire width of TARGET). Then
4880 call ourselves recursively to store the field into a BLKmode version of
4881 that object. Finally, load from the object into TARGET. This is not
4882 very efficient in general, but should only be slightly more expensive
4883 than the otherwise-required unaligned accesses. Perhaps this can be
4884 cleaned up later. */
4886 if (mode == BLKmode
4887 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4889 rtx object = assign_stack_temp (GET_MODE (target),
4890 GET_MODE_SIZE (GET_MODE (target)), 0);
4891 rtx blk_object = copy_rtx (object);
4893 MEM_SET_IN_STRUCT_P (object, 1);
4894 MEM_SET_IN_STRUCT_P (blk_object, 1);
4895 PUT_MODE (blk_object, BLKmode);
4897 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4898 emit_move_insn (object, target);
4900 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4901 align, total_size, alias_set);
4903 /* Even though we aren't returning target, we need to
4904 give it the updated value. */
4905 emit_move_insn (target, object);
4907 return blk_object;
4910 if (GET_CODE (target) == CONCAT)
4912 /* We're storing into a struct containing a single __complex. */
4914 if (bitpos != 0)
4915 abort ();
4916 return store_expr (exp, target, 0);
4919 /* If the structure is in a register or if the component
4920 is a bit field, we cannot use addressing to access it.
4921 Use bit-field techniques or SUBREG to store in it. */
4923 if (mode == VOIDmode
4924 || (mode != BLKmode && ! direct_store[(int) mode]
4925 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4926 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4927 || GET_CODE (target) == REG
4928 || GET_CODE (target) == SUBREG
4929 /* If the field isn't aligned enough to store as an ordinary memref,
4930 store it as a bit field. */
4931 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4932 && (align < GET_MODE_ALIGNMENT (mode)
4933 || bitpos % GET_MODE_ALIGNMENT (mode)))
4934 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4935 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4936 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4937 /* If the RHS and field are a constant size and the size of the
4938 RHS isn't the same size as the bitfield, we must use bitfield
4939 operations. */
4940 || (bitsize >= 0
4941 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4942 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4944 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4946 /* If BITSIZE is narrower than the size of the type of EXP
4947 we will be narrowing TEMP. Normally, what's wanted are the
4948 low-order bits. However, if EXP's type is a record and this is a
4949 big-endian machine, we want the upper BITSIZE bits. */
4950 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4951 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4952 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4953 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4954 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4955 - bitsize),
4956 temp, 1);
4958 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4959 MODE. */
4960 if (mode != VOIDmode && mode != BLKmode
4961 && mode != TYPE_MODE (TREE_TYPE (exp)))
4962 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4964 /* If the modes of TARGET and TEMP are both BLKmode, both
4965 must be in memory and BITPOS must be aligned on a byte
4966 boundary. If so, we simply do a block copy. */
4967 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4969 unsigned int exp_align = expr_align (exp);
4971 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4972 || bitpos % BITS_PER_UNIT != 0)
4973 abort ();
4975 target = change_address (target, VOIDmode,
4976 plus_constant (XEXP (target, 0),
4977 bitpos / BITS_PER_UNIT));
4979 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4980 align = MIN (exp_align, align);
4982 /* Find an alignment that is consistent with the bit position. */
4983 while ((bitpos % align) != 0)
4984 align >>= 1;
4986 emit_block_move (target, temp,
4987 bitsize == -1 ? expr_size (exp)
4988 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4989 / BITS_PER_UNIT),
4990 align);
4992 return value_mode == VOIDmode ? const0_rtx : target;
4995 /* Store the value in the bitfield. */
4996 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4997 if (value_mode != VOIDmode)
4999 /* The caller wants an rtx for the value. */
5000 /* If possible, avoid refetching from the bitfield itself. */
5001 if (width_mask != 0
5002 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5004 tree count;
5005 enum machine_mode tmode;
5007 if (unsignedp)
5008 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5009 tmode = GET_MODE (temp);
5010 if (tmode == VOIDmode)
5011 tmode = value_mode;
5012 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5013 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5014 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5016 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5017 NULL_RTX, value_mode, 0, align,
5018 total_size);
5020 return const0_rtx;
5022 else
5024 rtx addr = XEXP (target, 0);
5025 rtx to_rtx;
5027 /* If a value is wanted, it must be the lhs;
5028 so make the address stable for multiple use. */
5030 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5031 && ! CONSTANT_ADDRESS_P (addr)
5032 /* A frame-pointer reference is already stable. */
5033 && ! (GET_CODE (addr) == PLUS
5034 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5035 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5036 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5037 addr = copy_to_reg (addr);
5039 /* Now build a reference to just the desired component. */
5041 to_rtx = copy_rtx (change_address (target, mode,
5042 plus_constant (addr,
5043 (bitpos
5044 / BITS_PER_UNIT))));
5045 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5046 MEM_ALIAS_SET (to_rtx) = alias_set;
5048 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5052 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5053 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5054 ARRAY_REFs and find the ultimate containing object, which we return.
5056 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5057 bit position, and *PUNSIGNEDP to the signedness of the field.
5058 If the position of the field is variable, we store a tree
5059 giving the variable offset (in units) in *POFFSET.
5060 This offset is in addition to the bit position.
5061 If the position is not variable, we store 0 in *POFFSET.
5062 We set *PALIGNMENT to the alignment of the address that will be
5063 computed. This is the alignment of the thing we return if *POFFSET
5064 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5066 If any of the extraction expressions is volatile,
5067 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5069 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5070 is a mode that can be used to access the field. In that case, *PBITSIZE
5071 is redundant.
5073 If the field describes a variable-sized object, *PMODE is set to
5074 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5075 this case, but the address of the object can be found. */
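/* Illustrative example (added comment): for the C reference
`p->rec.arr[i]' this walks the nested COMPONENT_REFs and the ARRAY_REF,
returns the innermost object (the INDIRECT_REF of P), accumulates the
constant parts of the displacement in *PBITPOS, and leaves the variable
part, `i * sizeof (p->rec.arr[0])', as a tree in *POFFSET. */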
5077 tree
5078 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5079 punsignedp, pvolatilep, palignment)
5080 tree exp;
5081 HOST_WIDE_INT *pbitsize;
5082 HOST_WIDE_INT *pbitpos;
5083 tree *poffset;
5084 enum machine_mode *pmode;
5085 int *punsignedp;
5086 int *pvolatilep;
5087 unsigned int *palignment;
5089 tree size_tree = 0;
5090 enum machine_mode mode = VOIDmode;
5091 tree offset = size_zero_node;
5092 tree bit_offset = bitsize_zero_node;
5093 unsigned int alignment = BIGGEST_ALIGNMENT;
5094 tree tem;
5096 /* First get the mode, signedness, and size. We do this from just the
5097 outermost expression. */
5098 if (TREE_CODE (exp) == COMPONENT_REF)
5100 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5101 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5102 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5104 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5106 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5108 size_tree = TREE_OPERAND (exp, 1);
5109 *punsignedp = TREE_UNSIGNED (exp);
5111 else
5113 mode = TYPE_MODE (TREE_TYPE (exp));
5114 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5116 if (mode == BLKmode)
5117 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5118 else
5119 *pbitsize = GET_MODE_BITSIZE (mode);
5122 if (size_tree != 0)
5124 if (! host_integerp (size_tree, 1))
5125 mode = BLKmode, *pbitsize = -1;
5126 else
5127 *pbitsize = tree_low_cst (size_tree, 1);
5130 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5131 and find the ultimate containing object. */
5132 while (1)
5134 if (TREE_CODE (exp) == BIT_FIELD_REF)
5135 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5136 else if (TREE_CODE (exp) == COMPONENT_REF)
5138 tree field = TREE_OPERAND (exp, 1);
5139 tree this_offset = DECL_FIELD_OFFSET (field);
5141 /* If this field hasn't been filled in yet, don't go
5142 past it. This should only happen when folding expressions
5143 made during type construction. */
5144 if (this_offset == 0)
5145 break;
5146 else if (! TREE_CONSTANT (this_offset)
5147 && contains_placeholder_p (this_offset))
5148 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5150 offset = size_binop (PLUS_EXPR, offset, this_offset);
5151 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5152 DECL_FIELD_BIT_OFFSET (field));
5154 if (! host_integerp (offset, 0))
5155 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5158 else if (TREE_CODE (exp) == ARRAY_REF)
5160 tree index = TREE_OPERAND (exp, 1);
5161 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5162 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5163 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5165 /* We assume all arrays have sizes that are a multiple of a byte.
5166 First subtract the lower bound, if any, in the type of the
5167 index, then convert to sizetype and multiply by the size of the
5168 array element. */
5169 if (low_bound != 0 && ! integer_zerop (low_bound))
5170 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5171 index, low_bound));
5173 /* If the index has a self-referential type, pass it to a
5174 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5175 component to one. */
5176 if (! TREE_CONSTANT (index)
5177 && contains_placeholder_p (index))
5178 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5179 if (! TREE_CONSTANT (unit_size)
5180 && contains_placeholder_p (unit_size))
5181 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5182 TREE_OPERAND (exp, 0));
5184 offset = size_binop (PLUS_EXPR, offset,
5185 size_binop (MULT_EXPR,
5186 convert (sizetype, index),
5187 unit_size));
5190 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5191 && ! ((TREE_CODE (exp) == NOP_EXPR
5192 || TREE_CODE (exp) == CONVERT_EXPR)
5193 && (TYPE_MODE (TREE_TYPE (exp))
5194 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5195 break;
5197 /* If any reference in the chain is volatile, the effect is volatile. */
5198 if (TREE_THIS_VOLATILE (exp))
5199 *pvolatilep = 1;
5201 /* If the offset is non-constant already, then we can't assume any
5202 alignment more than the alignment here. */
5203 if (! TREE_CONSTANT (offset))
5204 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5206 exp = TREE_OPERAND (exp, 0);
5209 if (DECL_P (exp))
5210 alignment = MIN (alignment, DECL_ALIGN (exp));
5211 else if (TREE_TYPE (exp) != 0)
5212 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5214 /* If OFFSET is constant, see if we can return the whole thing as a
5215 constant bit position. Otherwise, split it up. */
5216 if (host_integerp (offset, 0)
5217 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5218 bitsize_unit_node))
5219 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5220 && host_integerp (tem, 0))
5221 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5222 else
5223 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5225 *pmode = mode;
5226 *palignment = alignment;
5227 return exp;
5230 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5232 static enum memory_use_mode
5233 get_memory_usage_from_modifier (modifier)
5234 enum expand_modifier modifier;
5236 switch (modifier)
5238 case EXPAND_NORMAL:
5239 case EXPAND_SUM:
5240 return MEMORY_USE_RO;
5241 break;
5242 case EXPAND_MEMORY_USE_WO:
5243 return MEMORY_USE_WO;
5244 break;
5245 case EXPAND_MEMORY_USE_RW:
5246 return MEMORY_USE_RW;
5247 break;
5248 case EXPAND_MEMORY_USE_DONT:
5249 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5250 MEMORY_USE_DONT, because they are modifiers to a call of
5251 expand_expr in the ADDR_EXPR case of expand_expr. */
5252 case EXPAND_CONST_ADDRESS:
5253 case EXPAND_INITIALIZER:
5254 return MEMORY_USE_DONT;
5255 case EXPAND_MEMORY_USE_BAD:
5256 default:
5257 abort ();
5261 /* Given an rtx VALUE that may contain additions and multiplications,
5262 return an equivalent value that just refers to a register or memory.
5263 This is done by generating instructions to perform the arithmetic
5264 and returning a pseudo-register containing the value.
5266 The returned value may be a REG, SUBREG, MEM or constant. */
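/* Illustrative example (added comment): given the address-like value
(plus (reg 100) (const_int 8)), the code below emits an add into a
pseudo (or into TARGET) and returns that register in place of the PLUS
expression. */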
5269 force_operand (value, target)
5270 rtx value, target;
5272 register optab binoptab = 0;
5273 /* Use a temporary to force order of execution of calls to
5274 `force_operand'. */
5275 rtx tmp;
5276 register rtx op2;
5277 /* Use subtarget as the target for operand 0 of a binary operation. */
5278 register rtx subtarget = get_subtarget (target);
5280 /* Check for a PIC address load. */
5281 if (flag_pic
5282 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5283 && XEXP (value, 0) == pic_offset_table_rtx
5284 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5285 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5286 || GET_CODE (XEXP (value, 1)) == CONST))
5288 if (!subtarget)
5289 subtarget = gen_reg_rtx (GET_MODE (value));
5290 emit_move_insn (subtarget, value);
5291 return subtarget;
5294 if (GET_CODE (value) == PLUS)
5295 binoptab = add_optab;
5296 else if (GET_CODE (value) == MINUS)
5297 binoptab = sub_optab;
5298 else if (GET_CODE (value) == MULT)
5300 op2 = XEXP (value, 1);
5301 if (!CONSTANT_P (op2)
5302 && !(GET_CODE (op2) == REG && op2 != subtarget))
5303 subtarget = 0;
5304 tmp = force_operand (XEXP (value, 0), subtarget);
5305 return expand_mult (GET_MODE (value), tmp,
5306 force_operand (op2, NULL_RTX),
5307 target, 1);
5310 if (binoptab)
5312 op2 = XEXP (value, 1);
5313 if (!CONSTANT_P (op2)
5314 && !(GET_CODE (op2) == REG && op2 != subtarget))
5315 subtarget = 0;
5316 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5318 binoptab = add_optab;
5319 op2 = negate_rtx (GET_MODE (value), op2);
5322 /* Check for an addition with OP2 a constant integer and our first
5323 operand a PLUS of a virtual register and something else. In that
5324 case, we want to emit the sum of the virtual register and the
5325 constant first and then add the other value. This allows virtual
5326 register instantiation to simply modify the constant rather than
5327 creating another one around this addition. */
5328 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5329 && GET_CODE (XEXP (value, 0)) == PLUS
5330 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5331 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5332 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5334 rtx temp = expand_binop (GET_MODE (value), binoptab,
5335 XEXP (XEXP (value, 0), 0), op2,
5336 subtarget, 0, OPTAB_LIB_WIDEN);
5337 return expand_binop (GET_MODE (value), binoptab, temp,
5338 force_operand (XEXP (XEXP (value, 0), 1), 0),
5339 target, 0, OPTAB_LIB_WIDEN);
5342 tmp = force_operand (XEXP (value, 0), subtarget);
5343 return expand_binop (GET_MODE (value), binoptab, tmp,
5344 force_operand (op2, NULL_RTX),
5345 target, 0, OPTAB_LIB_WIDEN);
5346 /* We give UNSIGNEDP = 0 to expand_binop
5347 because the only operations we are expanding here are signed ones. */
5349 return value;
5352 /* Subroutine of expand_expr:
5353 save the non-copied parts (LIST) of an expr (LHS), and return a list
5354 which can restore these values to their previous values,
5355 should something modify their storage. */
5357 static tree
5358 save_noncopied_parts (lhs, list)
5359 tree lhs;
5360 tree list;
5362 tree tail;
5363 tree parts = 0;
5365 for (tail = list; tail; tail = TREE_CHAIN (tail))
5366 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5367 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5368 else
5370 tree part = TREE_VALUE (tail);
5371 tree part_type = TREE_TYPE (part);
5372 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5373 rtx target = assign_temp (part_type, 0, 1, 1);
5374 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5375 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5376 parts = tree_cons (to_be_saved,
5377 build (RTL_EXPR, part_type, NULL_TREE,
5378 (tree) target),
5379 parts);
5380 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5382 return parts;
5385 /* Subroutine of expand_expr:
5386 record the non-copied parts (LIST) of an expr (LHS), and return a list
5387 which specifies the initial values of these parts. */
5389 static tree
5390 init_noncopied_parts (lhs, list)
5391 tree lhs;
5392 tree list;
5394 tree tail;
5395 tree parts = 0;
5397 for (tail = list; tail; tail = TREE_CHAIN (tail))
5398 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5399 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5400 else if (TREE_PURPOSE (tail))
5402 tree part = TREE_VALUE (tail);
5403 tree part_type = TREE_TYPE (part);
5404 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5405 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5407 return parts;
5410 /* Subroutine of expand_expr: return nonzero iff there is no way that
5411 EXP can reference X, which is being modified. TOP_P is nonzero if this
5412 call is going to be used to determine whether we need a temporary
5413 for EXP, as opposed to a recursive call to this function.
5415 It is always safe for this routine to return zero since it merely
5416 searches for optimization opportunities. */
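/* Illustrative example (added comment): when the expression being
expanded is a call, as in `*p = f ();', the CALL_EXPR case below assumes
the call may clobber any memory, so safe_from_p returns 0 for the MEM
`*p' and a caller that is modifying `*p' will not use it as the target
while expanding the call. */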
5419 safe_from_p (x, exp, top_p)
5420 rtx x;
5421 tree exp;
5422 int top_p;
5424 rtx exp_rtl = 0;
5425 int i, nops;
5426 static int save_expr_count;
5427 static int save_expr_size = 0;
5428 static tree *save_expr_rewritten;
5429 static tree save_expr_trees[256];
5431 if (x == 0
5432 /* If EXP has varying size, we MUST use a target since we currently
5433 have no way of allocating temporaries of variable size
5434 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5435 So we assume here that something at a higher level has prevented a
5436 clash. This is somewhat bogus, but the best we can do. Only
5437 do this when X is BLKmode and when we are at the top level. */
5438 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5439 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5440 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5441 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5442 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5443 != INTEGER_CST)
5444 && GET_MODE (x) == BLKmode))
5445 return 1;
5447 if (top_p && save_expr_size == 0)
5449 int rtn;
5451 save_expr_count = 0;
5452 save_expr_size = ARRAY_SIZE (save_expr_trees);
5453 save_expr_rewritten = &save_expr_trees[0];
5455 rtn = safe_from_p (x, exp, 1);
5457 for (i = 0; i < save_expr_count; ++i)
5459 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5460 abort ();
5461 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5464 save_expr_size = 0;
5466 return rtn;
5469 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5470 find the underlying pseudo. */
5471 if (GET_CODE (x) == SUBREG)
5473 x = SUBREG_REG (x);
5474 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5475 return 0;
5478 /* If X is a location in the outgoing argument area, it is always safe. */
5479 if (GET_CODE (x) == MEM
5480 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5481 || (GET_CODE (XEXP (x, 0)) == PLUS
5482 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5483 return 1;
5485 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5487 case 'd':
5488 exp_rtl = DECL_RTL (exp);
5489 break;
5491 case 'c':
5492 return 1;
5494 case 'x':
5495 if (TREE_CODE (exp) == TREE_LIST)
5496 return ((TREE_VALUE (exp) == 0
5497 || safe_from_p (x, TREE_VALUE (exp), 0))
5498 && (TREE_CHAIN (exp) == 0
5499 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5500 else if (TREE_CODE (exp) == ERROR_MARK)
5501 return 1; /* An already-visited SAVE_EXPR? */
5502 else
5503 return 0;
5505 case '1':
5506 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5508 case '2':
5509 case '<':
5510 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5511 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5513 case 'e':
5514 case 'r':
5515 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5516 the expression. If it is set, we conflict iff we are that rtx or
5517 both are in memory. Otherwise, we check all operands of the
5518 expression recursively. */
5520 switch (TREE_CODE (exp))
5522 case ADDR_EXPR:
5523 return (staticp (TREE_OPERAND (exp, 0))
5524 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5525 || TREE_STATIC (exp));
5527 case INDIRECT_REF:
5528 if (GET_CODE (x) == MEM)
5529 return 0;
5530 break;
5532 case CALL_EXPR:
5533 /* Assume that the call will clobber all hard registers and
5534 all of memory. */
5535 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5536 || GET_CODE (x) == MEM)
5537 return 0;
5538 break;
5540 case RTL_EXPR:
5541 /* If a sequence exists, we would have to scan every instruction
5542 in the sequence to see if it was safe. This is probably not
5543 worthwhile. */
5544 if (RTL_EXPR_SEQUENCE (exp))
5545 return 0;
5547 exp_rtl = RTL_EXPR_RTL (exp);
5548 break;
5550 case WITH_CLEANUP_EXPR:
5551 exp_rtl = RTL_EXPR_RTL (exp);
5552 break;
5554 case CLEANUP_POINT_EXPR:
5555 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5557 case SAVE_EXPR:
5558 exp_rtl = SAVE_EXPR_RTL (exp);
5559 if (exp_rtl)
5560 break;
5562 /* This SAVE_EXPR might appear many times in the top-level
5563 safe_from_p() expression, and if it has a complex
5564 subexpression, examining it multiple times could result
5565 in a combinatorial explosion. E.g. on an Alpha
5566 running at least 200MHz, a Fortran test case compiled with
5567 optimization took about 28 minutes to compile -- even though
5568 it was only a few lines long, and the complicated line causing
5569 so much time to be spent in the earlier version of safe_from_p()
5570 had only 293 or so unique nodes.
5572 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5573 where it is so we can turn it back in the top-level safe_from_p()
5574 when we're done. */
5576 /* For now, don't bother re-sizing the array. */
5577 if (save_expr_count >= save_expr_size)
5578 return 0;
5579 save_expr_rewritten[save_expr_count++] = exp;
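/* Scan the operands of this SAVE_EXPR with its code temporarily set to
   ERROR_MARK, so that a recursive occurrence of the same node is treated
   as already visited; restore the SAVE_EXPR code around each recursion.  */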
5581 nops = TREE_CODE_LENGTH (SAVE_EXPR);
5582 for (i = 0; i < nops; i++)
5584 tree operand = TREE_OPERAND (exp, i);
5585 if (operand == NULL_TREE)
5586 continue;
5587 TREE_SET_CODE (exp, ERROR_MARK);
5588 if (!safe_from_p (x, operand, 0))
5589 return 0;
5590 TREE_SET_CODE (exp, SAVE_EXPR);
5592 TREE_SET_CODE (exp, ERROR_MARK);
5593 return 1;
5595 case BIND_EXPR:
5596 /* The only operand we look at is operand 1. The rest aren't
5597 part of the expression. */
5598 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5600 case METHOD_CALL_EXPR:
5601 /* This takes an rtx argument, but shouldn't appear here. */
5602 abort ();
5604 default:
5605 break;
5608 /* If we have an rtx, we do not need to scan our operands. */
5609 if (exp_rtl)
5610 break;
5612 nops = first_rtl_op (TREE_CODE (exp));
5613 for (i = 0; i < nops; i++)
5614 if (TREE_OPERAND (exp, i) != 0
5615 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5616 return 0;
5618 /* If this is a language-specific tree code, it may require
5619 special handling. */
5620 if (TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE
5621 && lang_safe_from_p
5622 && !(*lang_safe_from_p) (x, exp))
5623 return 0;
5626 /* If we have an rtl, find any enclosed object. Then see if we conflict
5627 with it. */
5628 if (exp_rtl)
5630 if (GET_CODE (exp_rtl) == SUBREG)
5632 exp_rtl = SUBREG_REG (exp_rtl);
5633 if (GET_CODE (exp_rtl) == REG
5634 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5635 return 0;
5638 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5639 are memory and EXP is not readonly. */
5640 return ! (rtx_equal_p (x, exp_rtl)
5641 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5642 && ! TREE_READONLY (exp)));
5645 /* If we reach here, it is safe. */
5646 return 1;
5649 /* Subroutine of expand_expr: return nonzero iff EXP is an
5650 expression whose type is statically determinable. */
5652 static int
5653 fixed_type_p (exp)
5654 tree exp;
5656 if (TREE_CODE (exp) == PARM_DECL
5657 || TREE_CODE (exp) == VAR_DECL
5658 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5659 || TREE_CODE (exp) == COMPONENT_REF
5660 || TREE_CODE (exp) == ARRAY_REF)
5661 return 1;
5662 return 0;
5665 /* Subroutine of expand_expr: return rtx if EXP is a
5666 variable or parameter; else return 0. */
5668 static rtx
5669 var_rtx (exp)
5670 tree exp;
5672 STRIP_NOPS (exp);
5673 switch (TREE_CODE (exp))
5675 case PARM_DECL:
5676 case VAR_DECL:
5677 return DECL_RTL (exp);
5678 default:
5679 return 0;
5683 #ifdef MAX_INTEGER_COMPUTATION_MODE
5684 void
5685 check_max_integer_computation_mode (exp)
5686 tree exp;
5688 enum tree_code code;
5689 enum machine_mode mode;
5691 /* Strip any NOPs that don't change the mode. */
5692 STRIP_NOPS (exp);
5693 code = TREE_CODE (exp);
5695 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5696 if (code == NOP_EXPR
5697 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5698 return;
5700 /* First check the type of the overall operation. We need only look at
5701 unary, binary and relational operations. */
5702 if (TREE_CODE_CLASS (code) == '1'
5703 || TREE_CODE_CLASS (code) == '2'
5704 || TREE_CODE_CLASS (code) == '<')
5706 mode = TYPE_MODE (TREE_TYPE (exp));
5707 if (GET_MODE_CLASS (mode) == MODE_INT
5708 && mode > MAX_INTEGER_COMPUTATION_MODE)
5709 fatal ("unsupported wide integer operation");
5712 /* Check operand of a unary op. */
5713 if (TREE_CODE_CLASS (code) == '1')
5715 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5716 if (GET_MODE_CLASS (mode) == MODE_INT
5717 && mode > MAX_INTEGER_COMPUTATION_MODE)
5718 fatal ("unsupported wide integer operation");
5721 /* Check operands of a binary/comparison op. */
5722 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5724 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5725 if (GET_MODE_CLASS (mode) == MODE_INT
5726 && mode > MAX_INTEGER_COMPUTATION_MODE)
5727 fatal ("unsupported wide integer operation");
5729 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5730 if (GET_MODE_CLASS (mode) == MODE_INT
5731 && mode > MAX_INTEGER_COMPUTATION_MODE)
5732 fatal ("unsupported wide integer operation");
5735 #endif
5737 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5738 has any readonly fields. If any of the fields have types that
5739 contain readonly fields, return true as well. */
5741 static int
5742 readonly_fields_p (type)
5743 tree type;
5745 tree field;
5747 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5748 if (TREE_CODE (field) == FIELD_DECL
5749 && (TREE_READONLY (field)
5750 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5751 && readonly_fields_p (TREE_TYPE (field)))))
5752 return 1;
5754 return 0;
5757 /* expand_expr: generate code for computing expression EXP.
5758 An rtx for the computed value is returned. The value is never null.
5759 In the case of a void EXP, const0_rtx is returned.
5761 The value may be stored in TARGET if TARGET is nonzero.
5762 TARGET is just a suggestion; callers must assume that
5763 the rtx returned may not be the same as TARGET.
5765 If TARGET is CONST0_RTX, it means that the value will be ignored.
5767 If TMODE is not VOIDmode, it suggests generating the
5768 result in mode TMODE. But this is done only when convenient.
5769 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5770 TMODE is just a suggestion; callers must assume that
5771 the rtx returned may not have mode TMODE.
5773 Note that TARGET may have neither TMODE nor MODE. In that case, it
5774 probably will not be used.
5776 If MODIFIER is EXPAND_SUM then when EXP is an addition
5777 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5778 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5779 products as above, or REG or MEM, or constant.
5780 Ordinarily in such cases we would output mul or add instructions
5781 and then return a pseudo reg containing the sum.
5783 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5784 it also marks a label as absolutely required (it can't be dead).
5785 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5786 This is used for outputting expressions used in initializers.
5788 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5789 with a constant address even if that address is not normally legitimate.
5790 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5793 expand_expr (exp, target, tmode, modifier)
5794 register tree exp;
5795 rtx target;
5796 enum machine_mode tmode;
5797 enum expand_modifier modifier;
5799 register rtx op0, op1, temp;
5800 tree type = TREE_TYPE (exp);
5801 int unsignedp = TREE_UNSIGNED (type);
5802 register enum machine_mode mode;
5803 register enum tree_code code = TREE_CODE (exp);
5804 optab this_optab;
5805 rtx subtarget, original_target;
5806 int ignore;
5807 tree context;
5808 /* Used by check-memory-usage to make modifier read only. */
5809 enum expand_modifier ro_modifier;
5811 /* Handle ERROR_MARK before anybody tries to access its type. */
5812 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5814 op0 = CONST0_RTX (tmode);
5815 if (op0 != 0)
5816 return op0;
5817 return const0_rtx;
5820 mode = TYPE_MODE (type);
5821 /* Use subtarget as the target for operand 0 of a binary operation. */
5822 subtarget = get_subtarget (target);
5823 original_target = target;
5824 ignore = (target == const0_rtx
5825 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5826 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5827 || code == COND_EXPR)
5828 && TREE_CODE (type) == VOID_TYPE));
5830 /* Make a read-only version of the modifier. */
5831 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5832 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5833 ro_modifier = modifier;
5834 else
5835 ro_modifier = EXPAND_NORMAL;
5837 /* If we are going to ignore this result, we need only do something
5838 if there is a side-effect somewhere in the expression. If there
5839 is, short-circuit the most common cases here. Note that we must
5840 not call expand_expr with anything but const0_rtx in case this
5841 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5843 if (ignore)
5845 if (! TREE_SIDE_EFFECTS (exp))
5846 return const0_rtx;
5848 /* Ensure we reference a volatile object even if value is ignored, but
5849 don't do this if all we are doing is taking its address. */
5850 if (TREE_THIS_VOLATILE (exp)
5851 && TREE_CODE (exp) != FUNCTION_DECL
5852 && mode != VOIDmode && mode != BLKmode
5853 && modifier != EXPAND_CONST_ADDRESS)
5855 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5856 if (GET_CODE (temp) == MEM)
5857 temp = copy_to_reg (temp);
5858 return const0_rtx;
5861 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5862 || code == INDIRECT_REF || code == BUFFER_REF)
5863 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5864 VOIDmode, ro_modifier);
5865 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5866 || code == ARRAY_REF)
5868 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5869 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5870 return const0_rtx;
5872 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5873 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5874 /* If the second operand has no side effects, just evaluate
5875 the first. */
5876 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5877 VOIDmode, ro_modifier);
5878 else if (code == BIT_FIELD_REF)
5880 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5881 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5882 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5883 return const0_rtx;
5886 target = 0;
5889 #ifdef MAX_INTEGER_COMPUTATION_MODE
5890 /* Only check stuff here if the mode we want is different from the mode
5891 of the expression; if it's the same, check_max_integer_computation_mode
5892 will handle it. Do we really need to check this stuff at all? */
5894 if (target
5895 && GET_MODE (target) != mode
5896 && TREE_CODE (exp) != INTEGER_CST
5897 && TREE_CODE (exp) != PARM_DECL
5898 && TREE_CODE (exp) != ARRAY_REF
5899 && TREE_CODE (exp) != COMPONENT_REF
5900 && TREE_CODE (exp) != BIT_FIELD_REF
5901 && TREE_CODE (exp) != INDIRECT_REF
5902 && TREE_CODE (exp) != CALL_EXPR
5903 && TREE_CODE (exp) != VAR_DECL
5904 && TREE_CODE (exp) != RTL_EXPR)
5906 enum machine_mode mode = GET_MODE (target);
5908 if (GET_MODE_CLASS (mode) == MODE_INT
5909 && mode > MAX_INTEGER_COMPUTATION_MODE)
5910 fatal ("unsupported wide integer operation");
5913 if (tmode != mode
5914 && TREE_CODE (exp) != INTEGER_CST
5915 && TREE_CODE (exp) != PARM_DECL
5916 && TREE_CODE (exp) != ARRAY_REF
5917 && TREE_CODE (exp) != COMPONENT_REF
5918 && TREE_CODE (exp) != BIT_FIELD_REF
5919 && TREE_CODE (exp) != INDIRECT_REF
5920 && TREE_CODE (exp) != VAR_DECL
5921 && TREE_CODE (exp) != CALL_EXPR
5922 && TREE_CODE (exp) != RTL_EXPR
5923 && GET_MODE_CLASS (tmode) == MODE_INT
5924 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5925 fatal ("unsupported wide integer operation");
5927 check_max_integer_computation_mode (exp);
5928 #endif
5930 /* If we will do cse, generate all results into pseudo registers
5931 since 1) that allows cse to find more things
5932 and 2) otherwise cse could produce an insn the machine
5933 cannot support. */
5935 if (! cse_not_expected && mode != BLKmode && target
5936 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5937 target = subtarget;
5939 switch (code)
5941 case LABEL_DECL:
5943 tree function = decl_function_context (exp);
5944 /* Handle using a label in a containing function. */
5945 if (function != current_function_decl
5946 && function != inline_function_decl && function != 0)
5948 struct function *p = find_function_data (function);
5949 p->expr->x_forced_labels
5950 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5951 p->expr->x_forced_labels);
5953 else
5955 if (modifier == EXPAND_INITIALIZER)
5956 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5957 label_rtx (exp),
5958 forced_labels);
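/* Represent the label as a MEM in FUNCTION_MODE whose address is a
   LABEL_REF; if the label belongs to an enclosing function, mark the
   reference nonlocal.  */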
5961 temp = gen_rtx_MEM (FUNCTION_MODE,
5962 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5963 if (function != current_function_decl
5964 && function != inline_function_decl && function != 0)
5965 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5966 return temp;
5969 case PARM_DECL:
5970 if (DECL_RTL (exp) == 0)
5972 error_with_decl (exp, "prior parameter's size depends on `%s'");
5973 return CONST0_RTX (mode);
5976 /* ... fall through ... */
5978 case VAR_DECL:
5979 /* If a static var's type was incomplete when the decl was written,
5980 but the type is complete now, lay out the decl now. */
5981 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5982 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5984 layout_decl (exp, 0);
5985 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5988 /* Although static-storage variables start off initialized, according to
5989 ANSI C, a memcpy could overwrite them with uninitialized values. So
5990 we check them too. This also lets us check for read-only variables
5991 accessed via a non-const declaration, in case it won't be detected
5992 any other way (e.g., in an embedded system or OS kernel without
5993 memory protection).
5995 Aggregates are not checked here; they're handled elsewhere. */
5996 if (cfun && current_function_check_memory_usage
5997 && code == VAR_DECL
5998 && GET_CODE (DECL_RTL (exp)) == MEM
5999 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6001 enum memory_use_mode memory_usage;
6002 memory_usage = get_memory_usage_from_modifier (modifier);
6004 in_check_memory_usage = 1;
6005 if (memory_usage != MEMORY_USE_DONT)
6006 emit_library_call (chkr_check_addr_libfunc,
6007 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6008 XEXP (DECL_RTL (exp), 0), Pmode,
6009 GEN_INT (int_size_in_bytes (type)),
6010 TYPE_MODE (sizetype),
6011 GEN_INT (memory_usage),
6012 TYPE_MODE (integer_type_node));
6013 in_check_memory_usage = 0;
6016 /* ... fall through ... */
6018 case FUNCTION_DECL:
6019 case RESULT_DECL:
6020 if (DECL_RTL (exp) == 0)
6021 abort ();
6023 /* Ensure the variable is marked as used even if it doesn't go through
6024 a parser. If it hasn't been used yet, write out an external
6025 definition. */
6026 if (! TREE_USED (exp))
6028 assemble_external (exp);
6029 TREE_USED (exp) = 1;
6032 /* Show we haven't gotten RTL for this yet. */
6033 temp = 0;
6035 /* Handle variables inherited from containing functions. */
6036 context = decl_function_context (exp);
6038 /* We treat inline_function_decl as an alias for the current function
6039 because that is the inline function whose vars, types, etc.
6040 are being merged into the current function.
6041 See expand_inline_function. */
6043 if (context != 0 && context != current_function_decl
6044 && context != inline_function_decl
6045 /* If var is static, we don't need a static chain to access it. */
6046 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6047 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6049 rtx addr;
6051 /* Mark as non-local and addressable. */
6052 DECL_NONLOCAL (exp) = 1;
6053 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6054 abort ();
6055 mark_addressable (exp);
6056 if (GET_CODE (DECL_RTL (exp)) != MEM)
6057 abort ();
6058 addr = XEXP (DECL_RTL (exp), 0);
6059 if (GET_CODE (addr) == MEM)
6060 addr = change_address (addr, Pmode,
6061 fix_lexical_addr (XEXP (addr, 0), exp));
6062 else
6063 addr = fix_lexical_addr (addr, exp);
6065 temp = change_address (DECL_RTL (exp), mode, addr);
6068 /* This is the case of an array whose size is to be determined
6069 from its initializer, while the initializer is still being parsed.
6070 See expand_decl. */
6072 else if (GET_CODE (DECL_RTL (exp)) == MEM
6073 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6074 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6075 XEXP (DECL_RTL (exp), 0));
6077 /* If DECL_RTL is memory, we are in the normal case; if either
6078 the address is not valid, or it is not a register and -fforce-addr
6079 is specified, get the address into a register. */
6081 else if (GET_CODE (DECL_RTL (exp)) == MEM
6082 && modifier != EXPAND_CONST_ADDRESS
6083 && modifier != EXPAND_SUM
6084 && modifier != EXPAND_INITIALIZER
6085 && (! memory_address_p (DECL_MODE (exp),
6086 XEXP (DECL_RTL (exp), 0))
6087 || (flag_force_addr
6088 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6089 temp = change_address (DECL_RTL (exp), VOIDmode,
6090 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6092 /* If we got something, return it. But first, set the alignment
6093 if the address is a register. */
6094 if (temp != 0)
6096 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6097 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6099 return temp;
6102 /* If the mode of DECL_RTL does not match that of the decl, it
6103 must be a promoted value. We return a SUBREG of the wanted mode,
6104 but mark it so that we know that it was already extended. */
6106 if (GET_CODE (DECL_RTL (exp)) == REG
6107 && GET_MODE (DECL_RTL (exp)) != mode)
6109 /* Get the signedness used for this variable. Ensure we get the
6110 same mode we got when the variable was declared. */
6111 if (GET_MODE (DECL_RTL (exp))
6112 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6113 abort ();
6115 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6116 SUBREG_PROMOTED_VAR_P (temp) = 1;
6117 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6118 return temp;
6121 return DECL_RTL (exp);
6123 case INTEGER_CST:
6124 return immed_double_const (TREE_INT_CST_LOW (exp),
6125 TREE_INT_CST_HIGH (exp), mode);
6127 case CONST_DECL:
6128 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6129 EXPAND_MEMORY_USE_BAD);
6131 case REAL_CST:
6132 /* If optimized, generate immediate CONST_DOUBLE
6133 which will be turned into memory by reload if necessary.
6135 We used to force a register so that loop.c could see it. But
6136 this does not allow gen_* patterns to perform optimizations with
6137 the constants. It also produces two insns in cases like "x = 1.0;".
6138 On most machines, floating-point constants are not permitted in
6139 many insns, so we'd end up copying it to a register in any case.
6141 Now, we do the copying in expand_binop, if appropriate. */
6142 return immed_real_const (exp);
6144 case COMPLEX_CST:
6145 case STRING_CST:
6146 if (! TREE_CST_RTL (exp))
6147 output_constant_def (exp, 1);
6149 /* TREE_CST_RTL probably contains a constant address.
6150 On RISC machines where a constant address isn't valid,
6151 make some insns to get that address into a register. */
6152 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6153 && modifier != EXPAND_CONST_ADDRESS
6154 && modifier != EXPAND_INITIALIZER
6155 && modifier != EXPAND_SUM
6156 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6157 || (flag_force_addr
6158 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6159 return change_address (TREE_CST_RTL (exp), VOIDmode,
6160 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6161 return TREE_CST_RTL (exp);
6163 case EXPR_WITH_FILE_LOCATION:
6165 rtx to_return;
6166 const char *saved_input_filename = input_filename;
6167 int saved_lineno = lineno;
6168 input_filename = EXPR_WFL_FILENAME (exp);
6169 lineno = EXPR_WFL_LINENO (exp);
6170 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6171 emit_line_note (input_filename, lineno);
6172 /* Possibly avoid switching back and forth here. */
6173 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6174 input_filename = saved_input_filename;
6175 lineno = saved_lineno;
6176 return to_return;
6179 case SAVE_EXPR:
6180 context = decl_function_context (exp);
6182 /* If this SAVE_EXPR was at global context, assume we are an
6183 initialization function and move it into our context. */
6184 if (context == 0)
6185 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6187 /* We treat inline_function_decl as an alias for the current function
6188 because that is the inline function whose vars, types, etc.
6189 are being merged into the current function.
6190 See expand_inline_function. */
6191 if (context == current_function_decl || context == inline_function_decl)
6192 context = 0;
6194 /* If this is non-local, handle it. */
6195 if (context)
6197 /* The following call just exists to abort if the context is
6198 not of a containing function. */
6199 find_function_data (context);
6201 temp = SAVE_EXPR_RTL (exp);
6202 if (temp && GET_CODE (temp) == REG)
6204 put_var_into_stack (exp);
6205 temp = SAVE_EXPR_RTL (exp);
6207 if (temp == 0 || GET_CODE (temp) != MEM)
6208 abort ();
6209 return change_address (temp, mode,
6210 fix_lexical_addr (XEXP (temp, 0), exp));
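/* If no RTL has been assigned to this SAVE_EXPR yet, allocate a place to
   hold the saved value (or use const0_rtx for a void type) and evaluate
   the operand into it.  */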
6212 if (SAVE_EXPR_RTL (exp) == 0)
6214 if (mode == VOIDmode)
6215 temp = const0_rtx;
6216 else
6218 temp = assign_temp (type, 3, 0, 0);
6219 if (GET_CODE (temp) == MEM)
6220 RTX_UNCHANGING_P (temp) = 1;
6223 SAVE_EXPR_RTL (exp) = temp;
6224 if (!optimize && GET_CODE (temp) == REG)
6225 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6226 save_expr_regs);
6228 /* If the mode of TEMP does not match that of the expression, it
6229 must be a promoted value. We pass store_expr a SUBREG of the
6230 wanted mode but mark it so that we know that it was already
6231 extended. Note that `unsignedp' was modified above in
6232 this case. */
6234 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6236 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6237 SUBREG_PROMOTED_VAR_P (temp) = 1;
6238 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6241 if (temp == const0_rtx)
6242 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6243 EXPAND_MEMORY_USE_BAD);
6244 else
6245 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6247 TREE_USED (exp) = 1;
6250 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6251 must be a promoted value. We return a SUBREG of the wanted mode,
6252 but mark it so that we know that it was already extended. */
6254 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6255 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6257 /* Compute the signedness and make the proper SUBREG. */
6258 promote_mode (type, mode, &unsignedp, 0);
6259 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6260 SUBREG_PROMOTED_VAR_P (temp) = 1;
6261 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6262 return temp;
6265 return SAVE_EXPR_RTL (exp);
6267 case UNSAVE_EXPR:
6269 rtx temp;
6270 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6271 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6272 return temp;
6275 case PLACEHOLDER_EXPR:
6277 tree placeholder_expr;
6279 /* If there is an object on the head of the placeholder list,
6280 see if some object in it is of type TYPE or a pointer to it. For
6281 further information, see tree.def. */
6282 for (placeholder_expr = placeholder_list;
6283 placeholder_expr != 0;
6284 placeholder_expr = TREE_CHAIN (placeholder_expr))
6286 tree need_type = TYPE_MAIN_VARIANT (type);
6287 tree object = 0;
6288 tree old_list = placeholder_list;
6289 tree elt;
6291 /* Find the outermost reference that is of the type we want.
6292 If none, see if any object has a type that is a pointer to
6293 the type we want. */
6294 for (elt = TREE_PURPOSE (placeholder_expr);
6295 elt != 0 && object == 0;
6297 = ((TREE_CODE (elt) == COMPOUND_EXPR
6298 || TREE_CODE (elt) == COND_EXPR)
6299 ? TREE_OPERAND (elt, 1)
6300 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6301 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6302 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6303 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6304 ? TREE_OPERAND (elt, 0) : 0))
6305 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6306 object = elt;
6308 for (elt = TREE_PURPOSE (placeholder_expr);
6309 elt != 0 && object == 0;
6311 = ((TREE_CODE (elt) == COMPOUND_EXPR
6312 || TREE_CODE (elt) == COND_EXPR)
6313 ? TREE_OPERAND (elt, 1)
6314 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6315 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6316 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6317 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6318 ? TREE_OPERAND (elt, 0) : 0))
6319 if (POINTER_TYPE_P (TREE_TYPE (elt))
6320 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6321 == need_type))
6322 object = build1 (INDIRECT_REF, need_type, elt);
6324 if (object != 0)
6326 /* Expand this object skipping the list entries before
6327 it was found in case it is also a PLACEHOLDER_EXPR.
6328 In that case, we want to translate it using subsequent
6329 entries. */
6330 placeholder_list = TREE_CHAIN (placeholder_expr);
6331 temp = expand_expr (object, original_target, tmode,
6332 ro_modifier);
6333 placeholder_list = old_list;
6334 return temp;
6339 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6340 abort ();
6342 case WITH_RECORD_EXPR:
6343 /* Put the object on the placeholder list, expand our first operand,
6344 and pop the list. */
6345 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6346 placeholder_list);
6347 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6348 tmode, ro_modifier);
6349 placeholder_list = TREE_CHAIN (placeholder_list);
6350 return target;
6352 case GOTO_EXPR:
6353 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6354 expand_goto (TREE_OPERAND (exp, 0));
6355 else
6356 expand_computed_goto (TREE_OPERAND (exp, 0));
6357 return const0_rtx;
6359 case EXIT_EXPR:
6360 expand_exit_loop_if_false (NULL_PTR,
6361 invert_truthvalue (TREE_OPERAND (exp, 0)));
6362 return const0_rtx;
6364 case LABELED_BLOCK_EXPR:
6365 if (LABELED_BLOCK_BODY (exp))
6366 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6367 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6368 return const0_rtx;
6370 case EXIT_BLOCK_EXPR:
6371 if (EXIT_BLOCK_RETURN (exp))
6372 sorry ("returned value in block_exit_expr");
6373 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6374 return const0_rtx;
6376 case LOOP_EXPR:
6377 push_temp_slots ();
6378 expand_start_loop (1);
6379 expand_expr_stmt (TREE_OPERAND (exp, 0));
6380 expand_end_loop ();
6381 pop_temp_slots ();
6383 return const0_rtx;
6385 case BIND_EXPR:
6387 tree vars = TREE_OPERAND (exp, 0);
6388 int vars_need_expansion = 0;
6390 /* Need to open a binding contour here because
6391 if there are any cleanups they must be contained here. */
6392 expand_start_bindings (2);
6394 /* Mark the corresponding BLOCK for output in its proper place. */
6395 if (TREE_OPERAND (exp, 2) != 0
6396 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6397 insert_block (TREE_OPERAND (exp, 2));
6399 /* If VARS have not yet been expanded, expand them now. */
6400 while (vars)
6402 if (DECL_RTL (vars) == 0)
6404 vars_need_expansion = 1;
6405 expand_decl (vars);
6407 expand_decl_init (vars);
6408 vars = TREE_CHAIN (vars);
6411 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6413 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6415 return temp;
6418 case RTL_EXPR:
6419 if (RTL_EXPR_SEQUENCE (exp))
6421 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6422 abort ();
6423 emit_insns (RTL_EXPR_SEQUENCE (exp));
6424 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6426 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6427 free_temps_for_rtl_expr (exp);
6428 return RTL_EXPR_RTL (exp);
6430 case CONSTRUCTOR:
6431 /* If we don't need the result, just ensure we evaluate any
6432 subexpressions. */
6433 if (ignore)
6435 tree elt;
6436 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6437 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6438 EXPAND_MEMORY_USE_BAD);
6439 return const0_rtx;
6442 /* All elts simple constants => refer to a constant in memory. But
6443 if this is a non-BLKmode mode, let it store a field at a time
6444 since that should make a CONST_INT or CONST_DOUBLE when we
6445 fold. Likewise, if we have a target we can use, it is best to
6446 store directly into the target unless the type is large enough
6447 that memcpy will be used. If we are making an initializer and
6448 all operands are constant, put it in memory as well. */
6449 else if ((TREE_STATIC (exp)
6450 && ((mode == BLKmode
6451 && ! (target != 0 && safe_from_p (target, exp, 1)))
6452 || TREE_ADDRESSABLE (exp)
6453 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6454 && (! MOVE_BY_PIECES_P
6455 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6456 TYPE_ALIGN (type)))
6457 && ! mostly_zeros_p (exp))))
6458 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6460 rtx constructor = output_constant_def (exp, 1);
6462 if (modifier != EXPAND_CONST_ADDRESS
6463 && modifier != EXPAND_INITIALIZER
6464 && modifier != EXPAND_SUM
6465 && (! memory_address_p (GET_MODE (constructor),
6466 XEXP (constructor, 0))
6467 || (flag_force_addr
6468 && GET_CODE (XEXP (constructor, 0)) != REG)))
6469 constructor = change_address (constructor, VOIDmode,
6470 XEXP (constructor, 0));
6471 return constructor;
6474 else
6476 /* Handle calls that pass values in multiple non-contiguous
6477 locations. The Irix 6 ABI has examples of this. */
6478 if (target == 0 || ! safe_from_p (target, exp, 1)
6479 || GET_CODE (target) == PARALLEL)
6481 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6482 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6483 else
6484 target = assign_temp (type, 0, 1, 1);
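/* For a read-only constructor, mark the target unchanging so that later
   references may treat it as constant; copy a MEM target first so we do
   not set the flag on a shared rtx.  */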
6487 if (TREE_READONLY (exp))
6489 if (GET_CODE (target) == MEM)
6490 target = copy_rtx (target);
6492 RTX_UNCHANGING_P (target) = 1;
6495 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6496 int_size_in_bytes (TREE_TYPE (exp)));
6497 return target;
6500 case INDIRECT_REF:
6502 tree exp1 = TREE_OPERAND (exp, 0);
6503 tree index;
6504 tree string = string_constant (exp1, &index);
6506 /* Try to optimize reads from const strings. */
6507 if (string
6508 && TREE_CODE (string) == STRING_CST
6509 && TREE_CODE (index) == INTEGER_CST
6510 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6511 && GET_MODE_CLASS (mode) == MODE_INT
6512 && GET_MODE_SIZE (mode) == 1
6513 && modifier != EXPAND_MEMORY_USE_WO)
6514 return
6515 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6517 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6518 op0 = memory_address (mode, op0);
6520 if (cfun && current_function_check_memory_usage
6521 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6523 enum memory_use_mode memory_usage;
6524 memory_usage = get_memory_usage_from_modifier (modifier);
6526 if (memory_usage != MEMORY_USE_DONT)
6528 in_check_memory_usage = 1;
6529 emit_library_call (chkr_check_addr_libfunc,
6530 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6531 Pmode, GEN_INT (int_size_in_bytes (type)),
6532 TYPE_MODE (sizetype),
6533 GEN_INT (memory_usage),
6534 TYPE_MODE (integer_type_node));
6535 in_check_memory_usage = 0;
6539 temp = gen_rtx_MEM (mode, op0);
6540 set_mem_attributes (temp, exp, 0);
6542 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6543 here, because, in C and C++, the fact that a location is accessed
6544 through a pointer to const does not mean that the value there can
6545 never change. Languages where it can never change should
6546 also set TREE_STATIC. */
6547 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6549 /* If we are writing to this object and its type is a record with
6550 readonly fields, we must mark it as readonly so it will
6551 conflict with readonly references to those fields. */
6552 if (modifier == EXPAND_MEMORY_USE_WO
6553 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6554 RTX_UNCHANGING_P (temp) = 1;
6556 return temp;
6559 case ARRAY_REF:
6560 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6561 abort ();
6564 tree array = TREE_OPERAND (exp, 0);
6565 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6566 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6567 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6568 HOST_WIDE_INT i;
6570 /* Optimize the special-case of a zero lower bound.
6572 We convert the low_bound to sizetype to avoid some problems
6573 with constant folding. (E.g. suppose the lower bound is 1,
6574 and its mode is QI. Without the conversion, (ARRAY
6575 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6576 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6578 if (! integer_zerop (low_bound))
6579 index = size_diffop (index, convert (sizetype, low_bound));
6581 /* Fold an expression like: "foo"[2].
6582 This is not done in fold so it won't happen inside &.
6583 Don't fold if this is for wide characters since it's too
6584 difficult to do correctly and this is a very rare case. */
6586 if (TREE_CODE (array) == STRING_CST
6587 && TREE_CODE (index) == INTEGER_CST
6588 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6589 && GET_MODE_CLASS (mode) == MODE_INT
6590 && GET_MODE_SIZE (mode) == 1)
6591 return
6592 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6594 /* If this is a constant index into a constant array,
6595 just get the value from the array. Handle both the cases when
6596 we have an explicit constructor and when our operand is a variable
6597 that was declared const. */
6599 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6600 && TREE_CODE (index) == INTEGER_CST
6601 && 0 > compare_tree_int (index,
6602 list_length (CONSTRUCTOR_ELTS
6603 (TREE_OPERAND (exp, 0)))))
6605 tree elem;
6607 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6608 i = TREE_INT_CST_LOW (index);
6609 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6612 if (elem)
6613 return expand_expr (fold (TREE_VALUE (elem)), target,
6614 tmode, ro_modifier);
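/* Likewise try to fold a constant index into a read-only array variable
   whose DECL_INITIAL (a CONSTRUCTOR or a STRING_CST) is still available.  */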
6617 else if (optimize >= 1
6618 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6619 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6620 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6622 if (TREE_CODE (index) == INTEGER_CST)
6624 tree init = DECL_INITIAL (array);
6626 if (TREE_CODE (init) == CONSTRUCTOR)
6628 tree elem;
6630 for (elem = CONSTRUCTOR_ELTS (init);
6631 (elem
6632 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6633 elem = TREE_CHAIN (elem))
6636 if (elem)
6637 return expand_expr (fold (TREE_VALUE (elem)), target,
6638 tmode, ro_modifier);
6640 else if (TREE_CODE (init) == STRING_CST
6641 && 0 > compare_tree_int (index,
6642 TREE_STRING_LENGTH (init)))
6644 tree type = TREE_TYPE (TREE_TYPE (init));
6645 enum machine_mode mode = TYPE_MODE (type);
6647 if (GET_MODE_CLASS (mode) == MODE_INT
6648 && GET_MODE_SIZE (mode) == 1)
6649 return (GEN_INT
6650 (TREE_STRING_POINTER
6651 (init)[TREE_INT_CST_LOW (index)]));
6656 /* Fall through. */
6658 case COMPONENT_REF:
6659 case BIT_FIELD_REF:
6660 /* If the operand is a CONSTRUCTOR, we can just extract the
6661 appropriate field if it is present. Don't do this if we have
6662 already written the data since we want to refer to that copy
6663 and varasm.c assumes that's what we'll do. */
6664 if (code != ARRAY_REF
6665 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6666 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6668 tree elt;
6670 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6671 elt = TREE_CHAIN (elt))
6672 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6673 /* We can normally use the value of the field in the
6674 CONSTRUCTOR. However, if this is a bitfield in
6675 an integral mode that we can fit in a HOST_WIDE_INT,
6676 we must mask only the number of bits in the bitfield,
6677 since this is done implicitly by the constructor. If
6678 the bitfield does not meet either of those conditions,
6679 we can't do this optimization. */
6680 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6681 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6682 == MODE_INT)
6683 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6684 <= HOST_BITS_PER_WIDE_INT))))
6686 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6687 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6689 HOST_WIDE_INT bitsize
6690 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6692 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6694 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6695 op0 = expand_and (op0, op1, target);
6697 else
6699 enum machine_mode imode
6700 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6701 tree count
6702 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6705 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6706 target, 0);
6707 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6708 target, 0);
6712 return op0;
6717 enum machine_mode mode1;
6718 HOST_WIDE_INT bitsize, bitpos;
6719 tree offset;
6720 int volatilep = 0;
6721 unsigned int alignment;
6722 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6723 &mode1, &unsignedp, &volatilep,
6724 &alignment);
6726 /* If we got back the original object, something is wrong. Perhaps
6727 we are evaluating an expression too early. In any event, don't
6728 infinitely recurse. */
6729 if (tem == exp)
6730 abort ();
6732 /* If TEM's type is a union of variable size, pass TARGET to the inner
6733 computation, since it will need a temporary and TARGET is known
6734 to be adequate. This occurs in unchecked conversion in Ada. */
6736 op0 = expand_expr (tem,
6737 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6738 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6739 != INTEGER_CST)
6740 ? target : NULL_RTX),
6741 VOIDmode,
6742 (modifier == EXPAND_INITIALIZER
6743 || modifier == EXPAND_CONST_ADDRESS)
6744 ? modifier : EXPAND_NORMAL);
6746 /* If this is a constant, put it into a register if it is a
6747 legitimate constant and OFFSET is 0 and memory if it isn't. */
6748 if (CONSTANT_P (op0))
6750 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6751 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6752 && offset == 0)
6753 op0 = force_reg (mode, op0);
6754 else
6755 op0 = validize_mem (force_const_mem (mode, op0));
6758 if (offset != 0)
6760 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6762 /* If this object is in a register or the like, put it into memory.
6763 This case can't occur in C, but can in Ada if we have
6764 unchecked conversion of an expression from a scalar type to
6765 an array or record type. */
6766 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6767 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6769 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6771 mark_temp_addr_taken (memloc);
6772 emit_move_insn (memloc, op0);
6773 op0 = memloc;
6776 if (GET_CODE (op0) != MEM)
6777 abort ();
6779 if (GET_MODE (offset_rtx) != ptr_mode)
6781 #ifdef POINTERS_EXTEND_UNSIGNED
6782 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6783 #else
6784 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6785 #endif
6788 /* A constant address in OP0 can have VOIDmode; we must not try
6789 to call force_reg in that case, so avoid it here. */
6790 if (GET_CODE (op0) == MEM
6791 && GET_MODE (op0) == BLKmode
6792 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6793 && bitsize != 0
6794 && (bitpos % bitsize) == 0
6795 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6796 && alignment == GET_MODE_ALIGNMENT (mode1))
6798 rtx temp = change_address (op0, mode1,
6799 plus_constant (XEXP (op0, 0),
6800 (bitpos /
6801 BITS_PER_UNIT)));
6802 if (GET_CODE (XEXP (temp, 0)) == REG)
6803 op0 = temp;
6804 else
6805 op0 = change_address (op0, mode1,
6806 force_reg (GET_MODE (XEXP (temp, 0)),
6807 XEXP (temp, 0)));
6808 bitpos = 0;
6811 op0 = change_address (op0, VOIDmode,
6812 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6813 force_reg (ptr_mode,
6814 offset_rtx)));
6817 /* Don't forget about volatility even if this is a bitfield. */
6818 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6820 op0 = copy_rtx (op0);
6821 MEM_VOLATILE_P (op0) = 1;
6824 /* Check the access. */
6825 if (cfun != 0 && current_function_check_memory_usage
6826 && GET_CODE (op0) == MEM)
6828 enum memory_use_mode memory_usage;
6829 memory_usage = get_memory_usage_from_modifier (modifier);
6831 if (memory_usage != MEMORY_USE_DONT)
6833 rtx to;
6834 int size;
6836 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6837 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6839 /* Check the access right of the pointer. */
6840 in_check_memory_usage = 1;
6841 if (size > BITS_PER_UNIT)
6842 emit_library_call (chkr_check_addr_libfunc,
6843 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6844 Pmode, GEN_INT (size / BITS_PER_UNIT),
6845 TYPE_MODE (sizetype),
6846 GEN_INT (memory_usage),
6847 TYPE_MODE (integer_type_node));
6848 in_check_memory_usage = 0;
6852 /* In cases where an aligned union has an unaligned object
6853 as a field, we might be extracting a BLKmode value from
6854 an integer-mode (e.g., SImode) object. Handle this case
6855 by doing the extract into an object as wide as the field
6856 (which we know to be the width of a basic mode), then
6857 storing into memory, and changing the mode to BLKmode.
6858 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6859 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6860 if (mode1 == VOIDmode
6861 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6862 || (modifier != EXPAND_CONST_ADDRESS
6863 && modifier != EXPAND_INITIALIZER
6864 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6865 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6866 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6867 /* If the field isn't aligned enough to fetch as a memref,
6868 fetch it as a bit field. */
6869 || (mode1 != BLKmode
6870 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6871 && ((TYPE_ALIGN (TREE_TYPE (tem))
6872 < GET_MODE_ALIGNMENT (mode))
6873 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6874 /* If the type and the field are a constant size and the
6875 size of the type isn't the same size as the bitfield,
6876 we must use bitfield operations. */
6877 || ((bitsize >= 0
6878 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6879 == INTEGER_CST)
6880 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6881 bitsize)))))
6882 || (modifier != EXPAND_CONST_ADDRESS
6883 && modifier != EXPAND_INITIALIZER
6884 && mode == BLKmode
6885 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6886 && (TYPE_ALIGN (type) > alignment
6887 || bitpos % TYPE_ALIGN (type) != 0)))
6889 enum machine_mode ext_mode = mode;
6891 if (ext_mode == BLKmode
6892 && ! (target != 0 && GET_CODE (op0) == MEM
6893 && GET_CODE (target) == MEM
6894 && bitpos % BITS_PER_UNIT == 0))
6895 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6897 if (ext_mode == BLKmode)
6899 /* In this case, BITPOS must start at a byte boundary and
6900 TARGET, if specified, must be a MEM. */
6901 if (GET_CODE (op0) != MEM
6902 || (target != 0 && GET_CODE (target) != MEM)
6903 || bitpos % BITS_PER_UNIT != 0)
6904 abort ();
6906 op0 = change_address (op0, VOIDmode,
6907 plus_constant (XEXP (op0, 0),
6908 bitpos / BITS_PER_UNIT));
6909 if (target == 0)
6910 target = assign_temp (type, 0, 1, 1);
6912 emit_block_move (target, op0,
6913 bitsize == -1 ? expr_size (exp)
6914 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6915 / BITS_PER_UNIT),
6916 BITS_PER_UNIT);
6918 return target;
6921 op0 = validize_mem (op0);
6923 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6924 mark_reg_pointer (XEXP (op0, 0), alignment);
6926 op0 = extract_bit_field (op0, bitsize, bitpos,
6927 unsignedp, target, ext_mode, ext_mode,
6928 alignment,
6929 int_size_in_bytes (TREE_TYPE (tem)));
6931 /* If the result is a record type and BITSIZE is narrower than
6932 the mode of OP0, an integral mode, and this is a big endian
6933 machine, we must put the field into the high-order bits. */
6934 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6935 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6936 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6937 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6938 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6939 - bitsize),
6940 op0, 1);
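/* If the caller wanted a BLKmode value, copy the extracted bits into a
   stack temporary and return that memory with its mode changed to
   BLKmode.  */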
6942 if (mode == BLKmode)
6944 rtx new = assign_stack_temp (ext_mode,
6945 bitsize / BITS_PER_UNIT, 0);
6947 emit_move_insn (new, op0);
6948 op0 = copy_rtx (new);
6949 PUT_MODE (op0, BLKmode);
6950 MEM_SET_IN_STRUCT_P (op0, 1);
6953 return op0;
6956 /* If the result is BLKmode, use that to access the object
6957 now as well. */
6958 if (mode == BLKmode)
6959 mode1 = BLKmode;
6961 /* Get a reference to just this component. */
6962 if (modifier == EXPAND_CONST_ADDRESS
6963 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6965 rtx new = gen_rtx_MEM (mode1,
6966 plus_constant (XEXP (op0, 0),
6967 (bitpos / BITS_PER_UNIT)));
6969 MEM_COPY_ATTRIBUTES (new, op0);
6970 op0 = new;
6972 else
6973 op0 = change_address (op0, mode1,
6974 plus_constant (XEXP (op0, 0),
6975 (bitpos / BITS_PER_UNIT)));
6977 set_mem_attributes (op0, exp, 0);
6978 if (GET_CODE (XEXP (op0, 0)) == REG)
6979 mark_reg_pointer (XEXP (op0, 0), alignment);
6981 MEM_VOLATILE_P (op0) |= volatilep;
6982 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6983 || modifier == EXPAND_CONST_ADDRESS
6984 || modifier == EXPAND_INITIALIZER)
6985 return op0;
6986 else if (target == 0)
6987 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6989 convert_move (target, op0, unsignedp);
6990 return target;
6993 /* Intended for a reference to a buffer of a file-object in Pascal.
6994 But it's not certain that a special tree code will really be
6995 necessary for these. INDIRECT_REF might work for them. */
6996 case BUFFER_REF:
6997 abort ();
6999 case IN_EXPR:
7001 /* Pascal set IN expression.
7003 Algorithm:
7004 rlo = set_low - (set_low%bits_per_word);
7005 the_word = set [ (index - rlo)/bits_per_word ];
7006 bit_index = index % bits_per_word;
7007 bitmask = 1 << bit_index;
7008 return !!(the_word & bitmask); */
7010 tree set = TREE_OPERAND (exp, 0);
7011 tree index = TREE_OPERAND (exp, 1);
7012 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7013 tree set_type = TREE_TYPE (set);
7014 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7015 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7016 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7017 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7018 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7019 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7020 rtx setaddr = XEXP (setval, 0);
7021 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7022 rtx rlow;
7023 rtx diff, quo, rem, addr, bit, result;
7025 /* If domain is empty, answer is no. Likewise if index is constant
7026 and out of bounds. */
7027 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7028 && TREE_CODE (set_low_bound) == INTEGER_CST
7029 && tree_int_cst_lt (set_high_bound, set_low_bound))
7030 || (TREE_CODE (index) == INTEGER_CST
7031 && TREE_CODE (set_low_bound) == INTEGER_CST
7032 && tree_int_cst_lt (index, set_low_bound))
7033 || (TREE_CODE (set_high_bound) == INTEGER_CST
7034 && TREE_CODE (index) == INTEGER_CST
7035 && tree_int_cst_lt (set_high_bound, index))))
7036 return const0_rtx;
7038 if (target == 0)
7039 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7041 /* If we get here, we have to generate the code for both cases
7042 (in range and out of range). */
7044 op0 = gen_label_rtx ();
7045 op1 = gen_label_rtx ();
7047 if (! (GET_CODE (index_val) == CONST_INT
7048 && GET_CODE (lo_r) == CONST_INT))
7050 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7051 GET_MODE (index_val), iunsignedp, 0, op1);
7054 if (! (GET_CODE (index_val) == CONST_INT
7055 && GET_CODE (hi_r) == CONST_INT))
7057 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7058 GET_MODE (index_val), iunsignedp, 0, op1);
7061 /* Calculate the element number of bit zero in the first word
7062 of the set. */
7063 if (GET_CODE (lo_r) == CONST_INT)
7064 rlow = GEN_INT (INTVAL (lo_r)
7065 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7066 else
7067 rlow = expand_binop (index_mode, and_optab, lo_r,
7068 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7069 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7071 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7072 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7074 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7075 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7076 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7077 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7079 addr = memory_address (byte_mode,
7080 expand_binop (index_mode, add_optab, diff,
7081 setaddr, NULL_RTX, iunsignedp,
7082 OPTAB_LIB_WIDEN));
7084 /* Extract the bit we want to examine. */
7085 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7086 gen_rtx_MEM (byte_mode, addr),
7087 make_tree (TREE_TYPE (index), rem),
7088 NULL_RTX, 1);
7089 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7090 GET_MODE (target) == byte_mode ? target : 0,
7091 1, OPTAB_LIB_WIDEN);
7093 if (result != target)
7094 convert_move (target, result, 1);
7096 /* Output the code to handle the out-of-range case. */
7097 emit_jump (op0);
7098 emit_label (op1);
7099 emit_move_insn (target, const0_rtx);
7100 emit_label (op0);
7101 return target;
7104 case WITH_CLEANUP_EXPR:
7105 if (RTL_EXPR_RTL (exp) == 0)
7107 RTL_EXPR_RTL (exp)
7108 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7109 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7111 /* That's it for this cleanup. */
7112 TREE_OPERAND (exp, 2) = 0;
7114 return RTL_EXPR_RTL (exp);
7116 case CLEANUP_POINT_EXPR:
7118 /* Start a new binding layer that will keep track of all cleanup
7119 actions to be performed. */
7120 expand_start_bindings (2);
7122 target_temp_slot_level = temp_slot_level;
7124 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7125 /* If we're going to use this value, load it up now. */
7126 if (! ignore)
7127 op0 = force_not_mem (op0);
7128 preserve_temp_slots (op0);
7129 expand_end_bindings (NULL_TREE, 0, 0);
7131 return op0;
7133 case CALL_EXPR:
7134 /* Check for a built-in function. */
7135 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7136 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7137 == FUNCTION_DECL)
7138 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7140 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7141 == BUILT_IN_FRONTEND)
7142 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7143 else
7144 return expand_builtin (exp, target, subtarget, tmode, ignore);
7147 return expand_call (exp, target, ignore);
7149 case NON_LVALUE_EXPR:
7150 case NOP_EXPR:
7151 case CONVERT_EXPR:
7152 case REFERENCE_EXPR:
7153 if (TREE_OPERAND (exp, 0) == error_mark_node)
7154 return const0_rtx;
7156 if (TREE_CODE (type) == UNION_TYPE)
7158 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7160 /* If both input and output are BLKmode, this conversion
7161 isn't actually doing anything unless we need to make the
7162 alignment stricter. */
7163 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7164 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7165 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7166 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7167 modifier);
7169 if (target == 0)
7171 if (mode != BLKmode)
7172 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7173 else
7174 target = assign_temp (type, 0, 1, 1);
7177 if (GET_CODE (target) == MEM)
7178 /* Store data into beginning of memory target. */
7179 store_expr (TREE_OPERAND (exp, 0),
7180 change_address (target, TYPE_MODE (valtype), 0), 0);
7182 else if (GET_CODE (target) == REG)
7183 /* Store this field into a union of the proper type. */
7184 store_field (target,
7185 MIN ((int_size_in_bytes (TREE_TYPE
7186 (TREE_OPERAND (exp, 0)))
7187 * BITS_PER_UNIT),
7188 GET_MODE_BITSIZE (mode)),
7189 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7190 VOIDmode, 0, BITS_PER_UNIT,
7191 int_size_in_bytes (type), 0);
7192 else
7193 abort ();
7195 /* Return the entire union. */
7196 return target;
7199 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7201 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7202 ro_modifier);
7204 /* If the signedness of the conversion differs and OP0 is
7205 a promoted SUBREG, clear that indication since we now
7206 have to do the proper extension. */
7207 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7208 && GET_CODE (op0) == SUBREG)
7209 SUBREG_PROMOTED_VAR_P (op0) = 0;
7211 return op0;
7214 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7215 if (GET_MODE (op0) == mode)
7216 return op0;
7218 /* If OP0 is a constant, just convert it into the proper mode. */
7219 if (CONSTANT_P (op0))
7220 return
7221 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7222 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7224 if (modifier == EXPAND_INITIALIZER)
7225 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7227 if (target == 0)
7228 return
7229 convert_to_mode (mode, op0,
7230 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7231 else
7232 convert_move (target, op0,
7233 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7234 return target;
7236 case PLUS_EXPR:
7237 /* We come here from MINUS_EXPR when the second operand is a
7238 constant. */
7239 plus_expr:
7240 this_optab = ! unsignedp && flag_trapv
7241 && (GET_MODE_CLASS(mode) == MODE_INT)
7242 ? addv_optab : add_optab;
7244 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7245 something else, make sure we add the register to the constant and
7246 then to the other thing. This case can occur during strength
7247 reduction and doing it this way will produce better code if the
7248 frame pointer or argument pointer is eliminated.
7250 fold-const.c will ensure that the constant is always in the inner
7251 PLUS_EXPR, so the only case we need to do anything about is if
7252 sp, ap, or fp is our second argument, in which case we must swap
7253 the innermost first argument and our second argument. */
7255 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7256 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7257 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7258 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7259 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7260 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7262 tree t = TREE_OPERAND (exp, 1);
7264 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7265 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7268 /* If the result is to be ptr_mode and we are adding an integer to
7269 something, we might be forming a constant. So try to use
7270 plus_constant. If it produces a sum and we can't accept it,
7271 use force_operand. This allows P = &ARR[const] to generate
7272 efficient code on machines where a SYMBOL_REF is not a valid
7273 address.
7275 If this is an EXPAND_SUM call, always return the sum. */
7276 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7277 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7279 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7280 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7281 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7283 rtx constant_part;
7285 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7286 EXPAND_SUM);
7287 /* Use immed_double_const to ensure that the constant is
7288 truncated according to the mode of OP1, then sign extended
7289 to a HOST_WIDE_INT. Using the constant directly can result
7290 in non-canonical RTL in a 64x32 cross compile. */
7291 constant_part
7292 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7293 (HOST_WIDE_INT) 0,
7294 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7295 op1 = plus_constant (op1, INTVAL (constant_part));
7296 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7297 op1 = force_operand (op1, target);
7298 return op1;
7301 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7302 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7303 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7305 rtx constant_part;
7307 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7308 EXPAND_SUM);
7309 if (! CONSTANT_P (op0))
7311 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7312 VOIDmode, modifier);
7313 /* Don't go to both_summands if modifier
7314 says it's not right to return a PLUS. */
7315 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7316 goto binop2;
7317 goto both_summands;
7319 /* Use immed_double_const to ensure that the constant is
7320 truncated according to the mode of OP1, then sign extended
7321 to a HOST_WIDE_INT. Using the constant directly can result
7322 in non-canonical RTL in a 64x32 cross compile. */
7323 constant_part
7324 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7325 (HOST_WIDE_INT) 0,
7326 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7327 op0 = plus_constant (op0, INTVAL (constant_part));
7328 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7329 op0 = force_operand (op0, target);
7330 return op0;
7334 /* No sense saving up arithmetic to be done
7335 if it's all in the wrong mode to form part of an address.
7336 And force_operand won't know whether to sign-extend or
7337 zero-extend. */
7338 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7339 || mode != ptr_mode)
7340 goto binop;
7342 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7343 subtarget = 0;
7345 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7346 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7348 both_summands:
7349 /* Make sure any term that's a sum with a constant comes last. */
7350 if (GET_CODE (op0) == PLUS
7351 && CONSTANT_P (XEXP (op0, 1)))
7353 temp = op0;
7354 op0 = op1;
7355 op1 = temp;
7357 /* If adding to a sum including a constant,
7358 associate it to put the constant outside. */
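/* Illustration: if OP1 is (x + 12), this turns OP0 + (x + 12) into
   (OP0 + x) + 12 and, below, folds any constant hidden inside OP0
   into that trailing constant.  */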
7359 if (GET_CODE (op1) == PLUS
7360 && CONSTANT_P (XEXP (op1, 1)))
7362 rtx constant_term = const0_rtx;
7364 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7365 if (temp != 0)
7366 op0 = temp;
7367 /* Ensure that MULT comes first if there is one. */
7368 else if (GET_CODE (op0) == MULT)
7369 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7370 else
7371 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7373 /* Let's also eliminate constants from op0 if possible. */
7374 op0 = eliminate_constant_term (op0, &constant_term);
7376 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7377 their sum should be a constant. Form it into OP1, since the
7378 result we want will then be OP0 + OP1. */
7380 temp = simplify_binary_operation (PLUS, mode, constant_term,
7381 XEXP (op1, 1));
7382 if (temp != 0)
7383 op1 = temp;
7384 else
7385 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7388 /* Put a constant term last and put a multiplication first. */
7389 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7390 temp = op1, op1 = op0, op0 = temp;
7392 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7393 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7395 case MINUS_EXPR:
7396 /* For initializers, we are allowed to return a MINUS of two
7397 symbolic constants. Here we handle all cases when both operands
7398 are constant. */
7399 /* Handle difference of two symbolic constants,
7400 for the sake of an initializer. */
7401 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7402 && really_constant_p (TREE_OPERAND (exp, 0))
7403 && really_constant_p (TREE_OPERAND (exp, 1)))
7405 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7406 VOIDmode, ro_modifier);
7407 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7408 VOIDmode, ro_modifier);
7410 /* If the last operand is a CONST_INT, use plus_constant of
7411 the negated constant. Else make the MINUS. */
7412 if (GET_CODE (op1) == CONST_INT)
7413 return plus_constant (op0, - INTVAL (op1));
7414 else
7415 return gen_rtx_MINUS (mode, op0, op1);
7417 /* Convert A - const to A + (-const). */
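/* E.g. a signed i - 7 is rebuilt as i + (-7) so it can take the
   PLUS_EXPR path above; this is skipped for unsigned types and when
   negating the constant would overflow.  */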
7418 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7420 tree negated = fold (build1 (NEGATE_EXPR, type,
7421 TREE_OPERAND (exp, 1)));
7423 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7424 /* If we can't negate the constant in TYPE, leave it alone and
7425 expand_binop will negate it for us. We used to try to do it
7426 here in the signed version of TYPE, but that doesn't work
7427 on POINTER_TYPEs. */;
7428 else
7430 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7431 goto plus_expr;
7434 this_optab = ! unsignedp && flag_trapv
7435 && (GET_MODE_CLASS(mode) == MODE_INT)
7436 ? subv_optab : sub_optab;
7437 goto binop;
7439 case MULT_EXPR:
7440 /* If first operand is constant, swap them.
7441 Thus the following special case checks need only
7442 check the second operand. */
7443 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7445 register tree t1 = TREE_OPERAND (exp, 0);
7446 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7447 TREE_OPERAND (exp, 1) = t1;
7450 /* Attempt to return something suitable for generating an
7451 indexed address, for machines that support that. */
7453 if (modifier == EXPAND_SUM && mode == ptr_mode
7454 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7455 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7457 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7458 EXPAND_SUM);
7460 /* Apply distributive law if OP0 is x+c. */
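/* Illustration with made-up operands: for (i + 4) * 8 this yields
   the rtx for i*8 + 32, a form that can feed an indexed address.  */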
7461 if (GET_CODE (op0) == PLUS
7462 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7463 return
7464 gen_rtx_PLUS
7465 (mode,
7466 gen_rtx_MULT
7467 (mode, XEXP (op0, 0),
7468 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7469 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7470 * INTVAL (XEXP (op0, 1))));
7472 if (GET_CODE (op0) != REG)
7473 op0 = force_operand (op0, NULL_RTX);
7474 if (GET_CODE (op0) != REG)
7475 op0 = copy_to_mode_reg (mode, op0);
7477 return
7478 gen_rtx_MULT (mode, op0,
7479 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7482 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7483 subtarget = 0;
7485 /* Check for multiplying things that have been extended
7486 from a narrower type. If this machine supports multiplying
7487 in that narrower type with a result in the desired type,
7488 do it that way, and avoid the explicit type-conversion. */
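/* E.g. with 32-bit ints A and B, (long long) A * (long long) B can use
   a single widening multiply (a mulsidi3-style pattern, where the
   target provides one) rather than extending both operands to the
   wider mode first.  */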
7489 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7490 && TREE_CODE (type) == INTEGER_TYPE
7491 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7492 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7493 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7494 && int_fits_type_p (TREE_OPERAND (exp, 1),
7495 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7496 /* Don't use a widening multiply if a shift will do. */
7497 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7498 > HOST_BITS_PER_WIDE_INT)
7499 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7501 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7502 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7504 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7505 /* If both operands are extended, they must either both
7506 be zero-extended or both be sign-extended. */
7507 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7509 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7511 enum machine_mode innermode
7512 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7513 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7514 ? smul_widen_optab : umul_widen_optab);
7515 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7516 ? umul_widen_optab : smul_widen_optab);
7517 if (mode == GET_MODE_WIDER_MODE (innermode))
7519 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7521 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7522 NULL_RTX, VOIDmode, 0);
7523 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7524 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7525 VOIDmode, 0);
7526 else
7527 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7528 NULL_RTX, VOIDmode, 0);
7529 goto binop2;
7531 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7532 && innermode == word_mode)
7534 rtx htem;
7535 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7536 NULL_RTX, VOIDmode, 0);
7537 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7538 op1 = convert_modes (innermode, mode,
7539 expand_expr (TREE_OPERAND (exp, 1),
7540 NULL_RTX, VOIDmode, 0),
7541 unsignedp);
7542 else
7543 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7544 NULL_RTX, VOIDmode, 0);
7545 temp = expand_binop (mode, other_optab, op0, op1, target,
7546 unsignedp, OPTAB_LIB_WIDEN);
7547 htem = expand_mult_highpart_adjust (innermode,
7548 gen_highpart (innermode, temp),
7549 op0, op1,
7550 gen_highpart (innermode, temp),
7551 unsignedp);
7552 emit_move_insn (gen_highpart (innermode, temp), htem);
7553 return temp;
7557 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7558 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7559 return expand_mult (mode, op0, op1, target, unsignedp);
7561 case TRUNC_DIV_EXPR:
7562 case FLOOR_DIV_EXPR:
7563 case CEIL_DIV_EXPR:
7564 case ROUND_DIV_EXPR:
7565 case EXACT_DIV_EXPR:
7566 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7567 subtarget = 0;
7568 /* Possible optimization: compute the dividend with EXPAND_SUM
7569 then if the divisor is constant can optimize the case
7570 where some terms of the dividend have coeffs divisible by it. */
7571 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7572 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7573 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7575 case RDIV_EXPR:
7576 this_optab = flodiv_optab;
7577 goto binop;
7579 case TRUNC_MOD_EXPR:
7580 case FLOOR_MOD_EXPR:
7581 case CEIL_MOD_EXPR:
7582 case ROUND_MOD_EXPR:
7583 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7584 subtarget = 0;
7585 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7586 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7587 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7589 case FIX_ROUND_EXPR:
7590 case FIX_FLOOR_EXPR:
7591 case FIX_CEIL_EXPR:
7592 abort (); /* Not used for C. */
7594 case FIX_TRUNC_EXPR:
7595 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7596 if (target == 0)
7597 target = gen_reg_rtx (mode);
7598 expand_fix (target, op0, unsignedp);
7599 return target;
7601 case FLOAT_EXPR:
7602 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7603 if (target == 0)
7604 target = gen_reg_rtx (mode);
7605 /* expand_float can't figure out what to do if FROM has VOIDmode.
7606 So give it the correct mode. With -O, cse will optimize this. */
7607 if (GET_MODE (op0) == VOIDmode)
7608 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7609 op0);
7610 expand_float (target, op0,
7611 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7612 return target;
7614 case NEGATE_EXPR:
7615 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7616 temp = expand_unop (mode,
7617 ! unsignedp && flag_trapv
7618 && (GET_MODE_CLASS(mode) == MODE_INT)
7619 ? negv_optab : neg_optab, op0, target, 0);
7620 if (temp == 0)
7621 abort ();
7622 return temp;
7624 case ABS_EXPR:
7625 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7627 /* Handle complex values specially. */
7628 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7629 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7630 return expand_complex_abs (mode, op0, target, unsignedp);
7632 /* Unsigned abs is simply the operand. Testing here means we don't
7633 risk generating incorrect code below. */
7634 if (TREE_UNSIGNED (type))
7635 return op0;
7637 return expand_abs (mode, op0, target, unsignedp,
7638 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7640 case MAX_EXPR:
7641 case MIN_EXPR:
7642 target = original_target;
7643 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7644 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7645 || GET_MODE (target) != mode
7646 || (GET_CODE (target) == REG
7647 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7648 target = gen_reg_rtx (mode);
7649 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7650 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7652 /* First try to do it with a special MIN or MAX instruction.
7653 If that does not win, use a conditional jump to select the proper
7654 value. */
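/* If neither optab applies, the fallback below is roughly, for MAX:
   target = op0; if (target >= op1) goto done; target = op1; done:
   (and the mirror image with <= for MIN).  */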
7655 this_optab = (TREE_UNSIGNED (type)
7656 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7657 : (code == MIN_EXPR ? smin_optab : smax_optab));
7659 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7660 OPTAB_WIDEN);
7661 if (temp != 0)
7662 return temp;
7664 /* At this point, a MEM target is no longer useful; we will get better
7665 code without it. */
7667 if (GET_CODE (target) == MEM)
7668 target = gen_reg_rtx (mode);
7670 if (target != op0)
7671 emit_move_insn (target, op0);
7673 op0 = gen_label_rtx ();
7675 /* If this mode is an integer too wide to compare properly,
7676 compare word by word. Rely on cse to optimize constant cases. */
7677 if (GET_MODE_CLASS (mode) == MODE_INT
7678 && ! can_compare_p (GE, mode, ccp_jump))
7680 if (code == MAX_EXPR)
7681 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7682 target, op1, NULL_RTX, op0);
7683 else
7684 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7685 op1, target, NULL_RTX, op0);
7687 else
7689 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7690 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7691 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7692 op0);
7694 emit_move_insn (target, op1);
7695 emit_label (op0);
7696 return target;
7698 case BIT_NOT_EXPR:
7699 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7700 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7701 if (temp == 0)
7702 abort ();
7703 return temp;
7705 case FFS_EXPR:
7706 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7707 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7708 if (temp == 0)
7709 abort ();
7710 return temp;
7712 /* ??? Can optimize bitwise operations with one arg constant.
7713 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7714 and (a bitwise1 b) bitwise2 b (etc)
7715 but that is probably not worthwhile. */
7717 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7718 boolean values when we want in all cases to compute both of them. In
7719 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7720 as actual zero-or-1 values and then bitwise anding. In cases where
7721 there cannot be any side effects, better code would be made by
7722 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7723 how to recognize those cases. */
7725 case TRUTH_AND_EXPR:
7726 case BIT_AND_EXPR:
7727 this_optab = and_optab;
7728 goto binop;
7730 case TRUTH_OR_EXPR:
7731 case BIT_IOR_EXPR:
7732 this_optab = ior_optab;
7733 goto binop;
7735 case TRUTH_XOR_EXPR:
7736 case BIT_XOR_EXPR:
7737 this_optab = xor_optab;
7738 goto binop;
7740 case LSHIFT_EXPR:
7741 case RSHIFT_EXPR:
7742 case LROTATE_EXPR:
7743 case RROTATE_EXPR:
7744 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7745 subtarget = 0;
7746 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7747 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7748 unsignedp);
7750 /* Could determine the answer when only additive constants differ. Also,
7751 the addition of one can be handled by changing the condition. */
7752 case LT_EXPR:
7753 case LE_EXPR:
7754 case GT_EXPR:
7755 case GE_EXPR:
7756 case EQ_EXPR:
7757 case NE_EXPR:
7758 case UNORDERED_EXPR:
7759 case ORDERED_EXPR:
7760 case UNLT_EXPR:
7761 case UNLE_EXPR:
7762 case UNGT_EXPR:
7763 case UNGE_EXPR:
7764 case UNEQ_EXPR:
7765 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7766 if (temp != 0)
7767 return temp;
7769 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
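/* Roughly: temp = foo; if (temp == 0) goto skip; temp = 1; skip:
   so the flag value is built without a dedicated set-flag insn.  */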
7770 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7771 && original_target
7772 && GET_CODE (original_target) == REG
7773 && (GET_MODE (original_target)
7774 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7776 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7777 VOIDmode, 0);
7779 if (temp != original_target)
7780 temp = copy_to_reg (temp);
7782 op1 = gen_label_rtx ();
7783 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7784 GET_MODE (temp), unsignedp, 0, op1);
7785 emit_move_insn (temp, const1_rtx);
7786 emit_label (op1);
7787 return temp;
7790 /* If no set-flag instruction, must generate a conditional
7791 store into a temporary variable. Drop through
7792 and handle this like && and ||. */
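/* The fallback below is roughly: target = 0; if (!<exp>) goto done;
   target = 1; done: -- the same shape used for && and ||.  */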
7794 case TRUTH_ANDIF_EXPR:
7795 case TRUTH_ORIF_EXPR:
7796 if (! ignore
7797 && (target == 0 || ! safe_from_p (target, exp, 1)
7798 /* Make sure we don't have a hard reg (such as function's return
7799 value) live across basic blocks, if not optimizing. */
7800 || (!optimize && GET_CODE (target) == REG
7801 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7802 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7804 if (target)
7805 emit_clr_insn (target);
7807 op1 = gen_label_rtx ();
7808 jumpifnot (exp, op1);
7810 if (target)
7811 emit_0_to_1_insn (target);
7813 emit_label (op1);
7814 return ignore ? const0_rtx : target;
7816 case TRUTH_NOT_EXPR:
7817 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7818 /* The parser is careful to generate TRUTH_NOT_EXPR
7819 only with operands that are always zero or one. */
7820 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7821 target, 1, OPTAB_LIB_WIDEN);
7822 if (temp == 0)
7823 abort ();
7824 return temp;
7826 case COMPOUND_EXPR:
7827 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7828 emit_queue ();
7829 return expand_expr (TREE_OPERAND (exp, 1),
7830 (ignore ? const0_rtx : target),
7831 VOIDmode, 0);
7833 case COND_EXPR:
7834 /* If we would have a "singleton" (see below) were it not for a
7835 conversion in each arm, bring that conversion back out. */
7836 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7837 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7838 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7839 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7841 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7842 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7844 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7845 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7846 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7847 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7848 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7849 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7850 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7851 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7852 return expand_expr (build1 (NOP_EXPR, type,
7853 build (COND_EXPR, TREE_TYPE (true),
7854 TREE_OPERAND (exp, 0),
7855 true, false)),
7856 target, tmode, modifier);
7860 /* Note that COND_EXPRs whose type is a structure or union
7861 are required to be constructed to contain assignments to
7862 a temporary variable, so that we can evaluate them here
7863 for side effects only. If type is void, we must do likewise.
7865 /* If an arm of the branch requires a cleanup,
7866 only that cleanup is performed. */
7868 tree singleton = 0;
7869 tree binary_op = 0, unary_op = 0;
7871 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7872 convert it to our mode, if necessary. */
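/* E.g. (x > y) ? 1 : 0 is expanded simply by expanding x > y itself
   and converting the resulting flag value to MODE if needed.  */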
7873 if (integer_onep (TREE_OPERAND (exp, 1))
7874 && integer_zerop (TREE_OPERAND (exp, 2))
7875 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7877 if (ignore)
7879 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7880 ro_modifier);
7881 return const0_rtx;
7884 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7885 if (GET_MODE (op0) == mode)
7886 return op0;
7888 if (target == 0)
7889 target = gen_reg_rtx (mode);
7890 convert_move (target, op0, unsignedp);
7891 return target;
7894 /* Check for X ? A + B : A. If we have this, we can copy A to the
7895 output and conditionally add B. Similarly for unary operations.
7896 Don't do this if X has side-effects because those side effects
7897 might affect A or B and the "?" operation is a sequence point in
7898 ANSI. (operand_equal_p tests for side effects.) */
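/* Sketch with hypothetical operands: x ? a + b : a becomes roughly
   temp = a; if (x) temp = temp + b; provided X has no side effects.  */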
7900 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7901 && operand_equal_p (TREE_OPERAND (exp, 2),
7902 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7903 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7904 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7905 && operand_equal_p (TREE_OPERAND (exp, 1),
7906 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7907 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7908 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7909 && operand_equal_p (TREE_OPERAND (exp, 2),
7910 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7911 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7912 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7913 && operand_equal_p (TREE_OPERAND (exp, 1),
7914 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7915 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7917 /* If we are not to produce a result, we have no target. Otherwise,
7918 if a target was specified use it; it will not be used as an
7919 intermediate target unless it is safe. If no target, use a
7920 temporary. */
7922 if (ignore)
7923 temp = 0;
7924 else if (original_target
7925 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7926 || (singleton && GET_CODE (original_target) == REG
7927 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7928 && original_target == var_rtx (singleton)))
7929 && GET_MODE (original_target) == mode
7930 #ifdef HAVE_conditional_move
7931 && (! can_conditionally_move_p (mode)
7932 || GET_CODE (original_target) == REG
7933 || TREE_ADDRESSABLE (type))
7934 #endif
7935 && ! (GET_CODE (original_target) == MEM
7936 && MEM_VOLATILE_P (original_target)))
7937 temp = original_target;
7938 else if (TREE_ADDRESSABLE (type))
7939 abort ();
7940 else
7941 temp = assign_temp (type, 0, 0, 1);
7943 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7944 do the test of X as a store-flag operation, do this as
7945 A + ((X != 0) << log C). Similarly for other simple binary
7946 operators. Only do for C == 1 if BRANCH_COST is low. */
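/* E.g. x ? a + 4 : a, when X can be done as a store-flag operation,
   becomes a + ((x != 0) << 2), avoiding a branch entirely.  */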
7947 if (temp && singleton && binary_op
7948 && (TREE_CODE (binary_op) == PLUS_EXPR
7949 || TREE_CODE (binary_op) == MINUS_EXPR
7950 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7951 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7952 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7953 : integer_onep (TREE_OPERAND (binary_op, 1)))
7954 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7956 rtx result;
7957 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
7958 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
7959 ? addv_optab : add_optab)
7960 : TREE_CODE (binary_op) == MINUS_EXPR
7961 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
7962 ? subv_optab : sub_optab)
7963 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7964 : xor_optab);
7966 /* If we had X ? A : A + 1, do this as A + (X == 0).
7968 We have to invert the truth value here and then put it
7969 back later if do_store_flag fails. We cannot simply copy
7970 TREE_OPERAND (exp, 0) to another variable and modify that
7971 because invert_truthvalue can modify the tree pointed to
7972 by its argument. */
7973 if (singleton == TREE_OPERAND (exp, 1))
7974 TREE_OPERAND (exp, 0)
7975 = invert_truthvalue (TREE_OPERAND (exp, 0));
7977 result = do_store_flag (TREE_OPERAND (exp, 0),
7978 (safe_from_p (temp, singleton, 1)
7979 ? temp : NULL_RTX),
7980 mode, BRANCH_COST <= 1);
7982 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7983 result = expand_shift (LSHIFT_EXPR, mode, result,
7984 build_int_2 (tree_log2
7985 (TREE_OPERAND
7986 (binary_op, 1)),
7988 (safe_from_p (temp, singleton, 1)
7989 ? temp : NULL_RTX), 0);
7991 if (result)
7993 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7994 return expand_binop (mode, boptab, op1, result, temp,
7995 unsignedp, OPTAB_LIB_WIDEN);
7997 else if (singleton == TREE_OPERAND (exp, 1))
7998 TREE_OPERAND (exp, 0)
7999 = invert_truthvalue (TREE_OPERAND (exp, 0));
8002 do_pending_stack_adjust ();
8003 NO_DEFER_POP;
8004 op0 = gen_label_rtx ();
8006 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8008 if (temp != 0)
8010 /* If the target conflicts with the other operand of the
8011 binary op, we can't use it. Also, we can't use the target
8012 if it is a hard register, because evaluating the condition
8013 might clobber it. */
8014 if ((binary_op
8015 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8016 || (GET_CODE (temp) == REG
8017 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8018 temp = gen_reg_rtx (mode);
8019 store_expr (singleton, temp, 0);
8021 else
8022 expand_expr (singleton,
8023 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8024 if (singleton == TREE_OPERAND (exp, 1))
8025 jumpif (TREE_OPERAND (exp, 0), op0);
8026 else
8027 jumpifnot (TREE_OPERAND (exp, 0), op0);
8029 start_cleanup_deferral ();
8030 if (binary_op && temp == 0)
8031 /* Just touch the other operand. */
8032 expand_expr (TREE_OPERAND (binary_op, 1),
8033 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8034 else if (binary_op)
8035 store_expr (build (TREE_CODE (binary_op), type,
8036 make_tree (type, temp),
8037 TREE_OPERAND (binary_op, 1)),
8038 temp, 0);
8039 else
8040 store_expr (build1 (TREE_CODE (unary_op), type,
8041 make_tree (type, temp)),
8042 temp, 0);
8043 op1 = op0;
8045 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8046 comparison operator. If we have one of these cases, set the
8047 output to A, branch on A (cse will merge these two references),
8048 then set the output to FOO. */
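/* Roughly: for (a != 0) ? a : b this emits temp = a; if (a != 0)
   goto done; temp = b; done: -- the two references to A are expected
   to be merged by cse.  */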
8049 else if (temp
8050 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8051 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8052 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8053 TREE_OPERAND (exp, 1), 0)
8054 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8055 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8056 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8058 if (GET_CODE (temp) == REG
8059 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8060 temp = gen_reg_rtx (mode);
8061 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8062 jumpif (TREE_OPERAND (exp, 0), op0);
8064 start_cleanup_deferral ();
8065 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8066 op1 = op0;
8068 else if (temp
8069 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8070 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8071 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8072 TREE_OPERAND (exp, 2), 0)
8073 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8074 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8075 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8077 if (GET_CODE (temp) == REG
8078 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8079 temp = gen_reg_rtx (mode);
8080 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8081 jumpifnot (TREE_OPERAND (exp, 0), op0);
8083 start_cleanup_deferral ();
8084 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8085 op1 = op0;
8087 else
8089 op1 = gen_label_rtx ();
8090 jumpifnot (TREE_OPERAND (exp, 0), op0);
8092 start_cleanup_deferral ();
8094 /* One branch of the cond can be void, if it never returns. For
8095 example A ? throw : E */
8096 if (temp != 0
8097 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8098 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8099 else
8100 expand_expr (TREE_OPERAND (exp, 1),
8101 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8102 end_cleanup_deferral ();
8103 emit_queue ();
8104 emit_jump_insn (gen_jump (op1));
8105 emit_barrier ();
8106 emit_label (op0);
8107 start_cleanup_deferral ();
8108 if (temp != 0
8109 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8110 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8111 else
8112 expand_expr (TREE_OPERAND (exp, 2),
8113 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8116 end_cleanup_deferral ();
8118 emit_queue ();
8119 emit_label (op1);
8120 OK_DEFER_POP;
8122 return temp;
8125 case TARGET_EXPR:
8127 /* Something needs to be initialized, but we didn't know
8128 where that thing was when building the tree. For example,
8129 it could be the return value of a function, or a parameter
8130 to a function that is laid down on the stack, or a temporary
8131 variable which must be passed by reference.
8133 We guarantee that the expression will either be constructed
8134 or copied into our original target. */
8136 tree slot = TREE_OPERAND (exp, 0);
8137 tree cleanups = NULL_TREE;
8138 tree exp1;
8140 if (TREE_CODE (slot) != VAR_DECL)
8141 abort ();
8143 if (! ignore)
8144 target = original_target;
8146 /* Set this here so that if we get a target that refers to a
8147 register variable that's already been used, put_reg_into_stack
8148 knows that it should fix up those uses. */
8149 TREE_USED (slot) = 1;
8151 if (target == 0)
8153 if (DECL_RTL (slot) != 0)
8155 target = DECL_RTL (slot);
8156 /* If we have already expanded the slot, don't do
8157 it again. (mrs) */
8158 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8159 return target;
8161 else
8163 target = assign_temp (type, 2, 0, 1);
8164 /* All temp slots at this level must not conflict. */
8165 preserve_temp_slots (target);
8166 DECL_RTL (slot) = target;
8167 if (TREE_ADDRESSABLE (slot))
8168 put_var_into_stack (slot);
8170 /* Since SLOT is not known to the called function
8171 to belong to its stack frame, we must build an explicit
8172 cleanup. This case occurs when we must build up a reference
8173 to pass the reference as an argument. In this case,
8174 it is very likely that such a reference need not be
8175 built here. */
8177 if (TREE_OPERAND (exp, 2) == 0)
8178 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8179 cleanups = TREE_OPERAND (exp, 2);
8182 else
8184 /* This case does occur, when expanding a parameter which
8185 needs to be constructed on the stack. The target
8186 is the actual stack address that we want to initialize.
8187 The function we call will perform the cleanup in this case. */
8189 /* If we have already assigned it space, use that space,
8190 not the target that we were passed in, as our target
8191 parameter is only a hint. */
8192 if (DECL_RTL (slot) != 0)
8194 target = DECL_RTL (slot);
8195 /* If we have already expanded the slot, don't do
8196 it again. (mrs) */
8197 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8198 return target;
8200 else
8202 DECL_RTL (slot) = target;
8203 /* If we must have an addressable slot, then make sure that
8204 the RTL that we just stored in slot is OK. */
8205 if (TREE_ADDRESSABLE (slot))
8206 put_var_into_stack (slot);
8210 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8211 /* Mark it as expanded. */
8212 TREE_OPERAND (exp, 1) = NULL_TREE;
8214 store_expr (exp1, target, 0);
8216 expand_decl_cleanup (NULL_TREE, cleanups);
8218 return target;
8221 case INIT_EXPR:
8223 tree lhs = TREE_OPERAND (exp, 0);
8224 tree rhs = TREE_OPERAND (exp, 1);
8225 tree noncopied_parts = 0;
8226 tree lhs_type = TREE_TYPE (lhs);
8228 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8229 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8230 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8231 TYPE_NONCOPIED_PARTS (lhs_type));
8232 while (noncopied_parts != 0)
8234 expand_assignment (TREE_VALUE (noncopied_parts),
8235 TREE_PURPOSE (noncopied_parts), 0, 0);
8236 noncopied_parts = TREE_CHAIN (noncopied_parts);
8238 return temp;
8241 case MODIFY_EXPR:
8243 /* If lhs is complex, expand calls in rhs before computing it.
8244 That's so we don't compute a pointer and save it over a call.
8245 If lhs is simple, compute it first so we can give it as a
8246 target if the rhs is just a call. This avoids an extra temp and copy
8247 and that prevents a partial-subsumption which makes bad code.
8248 Actually we could treat component_ref's of vars like vars. */
8250 tree lhs = TREE_OPERAND (exp, 0);
8251 tree rhs = TREE_OPERAND (exp, 1);
8252 tree noncopied_parts = 0;
8253 tree lhs_type = TREE_TYPE (lhs);
8255 temp = 0;
8257 if (TREE_CODE (lhs) != VAR_DECL
8258 && TREE_CODE (lhs) != RESULT_DECL
8259 && TREE_CODE (lhs) != PARM_DECL
8260 && ! (TREE_CODE (lhs) == INDIRECT_REF
8261 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8263 /* Check for |= or &= of a bitfield of size one into another bitfield
8264 of size 1. In this case, (unless we need the result of the
8265 assignment) we can do this more efficiently with a
8266 test followed by an assignment, if necessary.
8268 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8269 things change so we do, this code should be enhanced to
8270 support it. */
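/* Illustration with hypothetical one-bit fields: when the result is
   ignored, s.a |= t.b becomes "if (t.b) s.a = 1;" and s.a &= t.b
   becomes "if (! t.b) s.a = 0;", a test followed by a conditional
   store.  */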
8271 if (ignore
8272 && TREE_CODE (lhs) == COMPONENT_REF
8273 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8274 || TREE_CODE (rhs) == BIT_AND_EXPR)
8275 && TREE_OPERAND (rhs, 0) == lhs
8276 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8277 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8278 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8280 rtx label = gen_label_rtx ();
8282 do_jump (TREE_OPERAND (rhs, 1),
8283 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8284 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8285 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8286 (TREE_CODE (rhs) == BIT_IOR_EXPR
8287 ? integer_one_node
8288 : integer_zero_node)),
8289 0, 0);
8290 do_pending_stack_adjust ();
8291 emit_label (label);
8292 return const0_rtx;
8295 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8296 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8297 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8298 TYPE_NONCOPIED_PARTS (lhs_type));
8300 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8301 while (noncopied_parts != 0)
8303 expand_assignment (TREE_PURPOSE (noncopied_parts),
8304 TREE_VALUE (noncopied_parts), 0, 0);
8305 noncopied_parts = TREE_CHAIN (noncopied_parts);
8307 return temp;
8310 case RETURN_EXPR:
8311 if (!TREE_OPERAND (exp, 0))
8312 expand_null_return ();
8313 else
8314 expand_return (TREE_OPERAND (exp, 0));
8315 return const0_rtx;
8317 case PREINCREMENT_EXPR:
8318 case PREDECREMENT_EXPR:
8319 return expand_increment (exp, 0, ignore);
8321 case POSTINCREMENT_EXPR:
8322 case POSTDECREMENT_EXPR:
8323 /* Faster to treat as pre-increment if result is not used. */
8324 return expand_increment (exp, ! ignore, ignore);
8326 case ADDR_EXPR:
8327 /* If nonzero, TEMP will be set to the address of something that might
8328 be a MEM corresponding to a stack slot. */
8329 temp = 0;
8331 /* Are we taking the address of a nested function? */
8332 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8333 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8334 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8335 && ! TREE_STATIC (exp))
8337 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8338 op0 = force_operand (op0, target);
8340 /* If we are taking the address of something erroneous, just
8341 return a zero. */
8342 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8343 return const0_rtx;
8344 else
8346 /* We make sure to pass const0_rtx down if we came in with
8347 ignore set, to avoid doing the cleanups twice for something. */
8348 op0 = expand_expr (TREE_OPERAND (exp, 0),
8349 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8350 (modifier == EXPAND_INITIALIZER
8351 ? modifier : EXPAND_CONST_ADDRESS));
8353 /* If we are going to ignore the result, OP0 will have been set
8354 to const0_rtx, so just return it. Don't get confused and
8355 think we are taking the address of the constant. */
8356 if (ignore)
8357 return op0;
8359 op0 = protect_from_queue (op0, 0);
8361 /* We would like the object in memory. If it is a constant, we can
8362 have it be statically allocated into memory. For a non-constant,
8363 we need to allocate some memory and store the value into it. */
8365 if (CONSTANT_P (op0))
8366 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8367 op0);
8368 else if (GET_CODE (op0) == MEM)
8370 mark_temp_addr_taken (op0);
8371 temp = XEXP (op0, 0);
8374 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8375 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8376 || GET_CODE (op0) == PARALLEL)
8378 /* If this object is in a register, it must not
8379 be BLKmode. */
8380 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8381 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8383 mark_temp_addr_taken (memloc);
8384 if (GET_CODE (op0) == PARALLEL)
8385 /* Handle calls that pass values in multiple non-contiguous
8386 locations. The Irix 6 ABI has examples of this. */
8387 emit_group_store (memloc, op0,
8388 int_size_in_bytes (inner_type),
8389 TYPE_ALIGN (inner_type));
8390 else
8391 emit_move_insn (memloc, op0);
8392 op0 = memloc;
8395 if (GET_CODE (op0) != MEM)
8396 abort ();
8398 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8400 temp = XEXP (op0, 0);
8401 #ifdef POINTERS_EXTEND_UNSIGNED
8402 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8403 && mode == ptr_mode)
8404 temp = convert_memory_address (ptr_mode, temp);
8405 #endif
8406 return temp;
8409 op0 = force_operand (XEXP (op0, 0), target);
8412 if (flag_force_addr && GET_CODE (op0) != REG)
8413 op0 = force_reg (Pmode, op0);
8415 if (GET_CODE (op0) == REG
8416 && ! REG_USERVAR_P (op0))
8417 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8419 /* If we might have had a temp slot, add an equivalent address
8420 for it. */
8421 if (temp != 0)
8422 update_temp_slot_address (temp, op0);
8424 #ifdef POINTERS_EXTEND_UNSIGNED
8425 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8426 && mode == ptr_mode)
8427 op0 = convert_memory_address (ptr_mode, op0);
8428 #endif
8430 return op0;
8432 case ENTRY_VALUE_EXPR:
8433 abort ();
8435 /* COMPLEX type for Extended Pascal & Fortran */
8436 case COMPLEX_EXPR:
8438 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8439 rtx insns;
8441 /* Get the rtx code of the operands. */
8442 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8443 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8445 if (! target)
8446 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8448 start_sequence ();
8450 /* Move the real (op0) and imaginary (op1) parts to their location. */
8451 emit_move_insn (gen_realpart (mode, target), op0);
8452 emit_move_insn (gen_imagpart (mode, target), op1);
8454 insns = get_insns ();
8455 end_sequence ();
8457 /* Complex construction should appear as a single unit. */
8458 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8459 each with a separate pseudo as destination.
8460 It's not correct for flow to treat them as a unit. */
8461 if (GET_CODE (target) != CONCAT)
8462 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8463 else
8464 emit_insns (insns);
8466 return target;
8469 case REALPART_EXPR:
8470 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8471 return gen_realpart (mode, op0);
8473 case IMAGPART_EXPR:
8474 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8475 return gen_imagpart (mode, op0);
8477 case CONJ_EXPR:
8479 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8480 rtx imag_t;
8481 rtx insns;
8483 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8485 if (! target)
8486 target = gen_reg_rtx (mode);
8488 start_sequence ();
8490 /* Store the realpart and the negated imagpart to target. */
8491 emit_move_insn (gen_realpart (partmode, target),
8492 gen_realpart (partmode, op0));
8494 imag_t = gen_imagpart (partmode, target);
8495 temp = expand_unop (partmode,
8496 ! unsignedp && flag_trapv
8497 && (GET_MODE_CLASS(partmode) == MODE_INT)
8498 ? negv_optab : neg_optab,
8499 gen_imagpart (partmode, op0), imag_t, 0);
8500 if (temp != imag_t)
8501 emit_move_insn (imag_t, temp);
8503 insns = get_insns ();
8504 end_sequence ();
8506 /* Conjugate should appear as a single unit.
8507 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8508 each with a separate pseudo as destination.
8509 It's not correct for flow to treat them as a unit. */
8510 if (GET_CODE (target) != CONCAT)
8511 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8512 else
8513 emit_insns (insns);
8515 return target;
8518 case TRY_CATCH_EXPR:
8520 tree handler = TREE_OPERAND (exp, 1);
8522 expand_eh_region_start ();
8524 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8526 expand_eh_region_end (handler);
8528 return op0;
8531 case TRY_FINALLY_EXPR:
8533 tree try_block = TREE_OPERAND (exp, 0);
8534 tree finally_block = TREE_OPERAND (exp, 1);
8535 rtx finally_label = gen_label_rtx ();
8536 rtx done_label = gen_label_rtx ();
8537 rtx return_link = gen_reg_rtx (Pmode);
8538 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8539 (tree) finally_label, (tree) return_link);
8540 TREE_SIDE_EFFECTS (cleanup) = 1;
8542 /* Start a new binding layer that will keep track of all cleanup
8543 actions to be performed. */
8544 expand_start_bindings (2);
8546 target_temp_slot_level = temp_slot_level;
8548 expand_decl_cleanup (NULL_TREE, cleanup);
8549 op0 = expand_expr (try_block, target, tmode, modifier);
8551 preserve_temp_slots (op0);
8552 expand_end_bindings (NULL_TREE, 0, 0);
8553 emit_jump (done_label);
8554 emit_label (finally_label);
8555 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8556 emit_indirect_jump (return_link);
8557 emit_label (done_label);
8558 return op0;
8561 case GOTO_SUBROUTINE_EXPR:
8563 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8564 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8565 rtx return_address = gen_label_rtx ();
8566 emit_move_insn (return_link,
8567 gen_rtx_LABEL_REF (Pmode, return_address));
8568 emit_jump (subr);
8569 emit_label (return_address);
8570 return const0_rtx;
8573 case POPDCC_EXPR:
8575 rtx dcc = get_dynamic_cleanup_chain ();
8576 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8577 return const0_rtx;
8580 case POPDHC_EXPR:
8582 rtx dhc = get_dynamic_handler_chain ();
8583 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8584 return const0_rtx;
8587 case VA_ARG_EXPR:
8588 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8590 default:
8591 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8594 /* Here to do an ordinary binary operator, generating an instruction
8595 from the optab already placed in `this_optab'. */
8596 binop:
8597 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8598 subtarget = 0;
8599 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8600 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8601 binop2:
8602 temp = expand_binop (mode, this_optab, op0, op1, target,
8603 unsignedp, OPTAB_LIB_WIDEN);
8604 if (temp == 0)
8605 abort ();
8606 return temp;
8609 /* Similar to expand_expr, except that we don't specify a target, target
8610 mode, or modifier and we return the alignment of the inner type. This is
8611 used in cases where it is not necessary to align the result to the
8612 alignment of its type as long as we know the alignment of the result, for
8613 example for comparisons of BLKmode values. */
8615 static rtx
8616 expand_expr_unaligned (exp, palign)
8617 register tree exp;
8618 unsigned int *palign;
8620 register rtx op0;
8621 tree type = TREE_TYPE (exp);
8622 register enum machine_mode mode = TYPE_MODE (type);
8624 /* Default the alignment we return to that of the type. */
8625 *palign = TYPE_ALIGN (type);
8627 /* The only cases in which we do anything special is if the resulting mode
8628 is BLKmode. */
8629 if (mode != BLKmode)
8630 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8632 switch (TREE_CODE (exp))
8634 case CONVERT_EXPR:
8635 case NOP_EXPR:
8636 case NON_LVALUE_EXPR:
8637 /* Conversions between BLKmode values don't change the underlying
8638 alignment or value. */
8639 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8640 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8641 break;
8643 case ARRAY_REF:
8644 /* Much of the code for this case is copied directly from expand_expr.
8645 We need to duplicate it here because we will do something different
8646 in the fall-through case, so we need to handle the same exceptions
8647 it does. */
8649 tree array = TREE_OPERAND (exp, 0);
8650 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8651 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8652 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8653 HOST_WIDE_INT i;
8655 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8656 abort ();
8658 /* Optimize the special-case of a zero lower bound.
8660 We convert the low_bound to sizetype to avoid some problems
8661 with constant folding. (E.g. suppose the lower bound is 1,
8662 and its mode is QI. Without the conversion, (ARRAY
8663 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8664 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8666 if (! integer_zerop (low_bound))
8667 index = size_diffop (index, convert (sizetype, low_bound));
8669 /* If this is a constant index into a constant array,
8670 just get the value from the array. Handle both the cases when
8671 we have an explicit constructor and when our operand is a variable
8672 that was declared const. */
8674 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8675 && host_integerp (index, 0)
8676 && 0 > compare_tree_int (index,
8677 list_length (CONSTRUCTOR_ELTS
8678 (TREE_OPERAND (exp, 0)))))
8680 tree elem;
8682 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8683 i = tree_low_cst (index, 0);
8684 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8687 if (elem)
8688 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8691 else if (optimize >= 1
8692 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8693 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8694 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8696 if (TREE_CODE (index) == INTEGER_CST)
8698 tree init = DECL_INITIAL (array);
8700 if (TREE_CODE (init) == CONSTRUCTOR)
8702 tree elem;
8704 for (elem = CONSTRUCTOR_ELTS (init);
8705 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8706 elem = TREE_CHAIN (elem))
8709 if (elem)
8710 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8711 palign);
8716 /* Fall through. */
8718 case COMPONENT_REF:
8719 case BIT_FIELD_REF:
8720 /* If the operand is a CONSTRUCTOR, we can just extract the
8721 appropriate field if it is present. Don't do this if we have
8722 already written the data since we want to refer to that copy
8723 and varasm.c assumes that's what we'll do. */
8724 if (TREE_CODE (exp) != ARRAY_REF
8725 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8726 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8728 tree elt;
8730 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8731 elt = TREE_CHAIN (elt))
8732 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8733 /* Note that unlike the case in expand_expr, we know this is
8734 BLKmode and hence not an integer. */
8735 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8739 enum machine_mode mode1;
8740 HOST_WIDE_INT bitsize, bitpos;
8741 tree offset;
8742 int volatilep = 0;
8743 unsigned int alignment;
8744 int unsignedp;
8745 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8746 &mode1, &unsignedp, &volatilep,
8747 &alignment);
8749 /* If we got back the original object, something is wrong. Perhaps
8750 we are evaluating an expression too early. In any event, don't
8751 infinitely recurse. */
8752 if (tem == exp)
8753 abort ();
8755 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8757 /* If this is a constant, put it into a register if it is a
8758 legitimate constant and OFFSET is 0 and memory if it isn't. */
8759 if (CONSTANT_P (op0))
8761 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8763 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8764 && offset == 0)
8765 op0 = force_reg (inner_mode, op0);
8766 else
8767 op0 = validize_mem (force_const_mem (inner_mode, op0));
8770 if (offset != 0)
8772 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8774 /* If this object is in a register, put it into memory.
8775 This case can't occur in C, but can in Ada if we have
8776 unchecked conversion of an expression from a scalar type to
8777 an array or record type. */
8778 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8779 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8781 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8783 mark_temp_addr_taken (memloc);
8784 emit_move_insn (memloc, op0);
8785 op0 = memloc;
8788 if (GET_CODE (op0) != MEM)
8789 abort ();
8791 if (GET_MODE (offset_rtx) != ptr_mode)
8793 #ifdef POINTERS_EXTEND_UNSIGNED
8794 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8795 #else
8796 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8797 #endif
8800 op0 = change_address (op0, VOIDmode,
8801 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8802 force_reg (ptr_mode,
8803 offset_rtx)));
8806 /* Don't forget about volatility even if this is a bitfield. */
8807 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8809 op0 = copy_rtx (op0);
8810 MEM_VOLATILE_P (op0) = 1;
8813 /* Check the access. */
8814 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8816 rtx to;
8817 int size;
8819 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8820 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8822 /* Check the access right of the pointer. */
8823 in_check_memory_usage = 1;
8824 if (size > BITS_PER_UNIT)
8825 emit_library_call (chkr_check_addr_libfunc,
8826 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8827 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8828 TYPE_MODE (sizetype),
8829 GEN_INT (MEMORY_USE_RO),
8830 TYPE_MODE (integer_type_node));
8831 in_check_memory_usage = 0;
8834 /* In cases where an aligned union has an unaligned object
8835 as a field, we might be extracting a BLKmode value from
8836 an integer-mode (e.g., SImode) object. Handle this case
8837 by doing the extract into an object as wide as the field
8838 (which we know to be the width of a basic mode), then
8839 storing into memory, and changing the mode to BLKmode.
8840 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8841 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8842 if (mode1 == VOIDmode
8843 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8844 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8845 && (TYPE_ALIGN (type) > alignment
8846 || bitpos % TYPE_ALIGN (type) != 0)))
8848 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8850 if (ext_mode == BLKmode)
8852 /* In this case, BITPOS must start at a byte boundary. */
8853 if (GET_CODE (op0) != MEM
8854 || bitpos % BITS_PER_UNIT != 0)
8855 abort ();
8857 op0 = change_address (op0, VOIDmode,
8858 plus_constant (XEXP (op0, 0),
8859 bitpos / BITS_PER_UNIT));
8861 else
8863 rtx new = assign_stack_temp (ext_mode,
8864 bitsize / BITS_PER_UNIT, 0);
8866 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8867 unsignedp, NULL_RTX, ext_mode,
8868 ext_mode, alignment,
8869 int_size_in_bytes (TREE_TYPE (tem)));
8871 /* If the result is a record type and BITSIZE is narrower than
8872 the mode of OP0, an integral mode, and this is a big endian
8873 machine, we must put the field into the high-order bits. */
8874 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8875 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8876 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8877 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8878 size_int (GET_MODE_BITSIZE
8879 (GET_MODE (op0))
8880 - bitsize),
8881 op0, 1);
8883 emit_move_insn (new, op0);
8884 op0 = copy_rtx (new);
8885 PUT_MODE (op0, BLKmode);
8888 else
8889 /* Get a reference to just this component. */
8890 op0 = change_address (op0, mode1,
8891 plus_constant (XEXP (op0, 0),
8892 (bitpos / BITS_PER_UNIT)));
8894 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8896 /* Adjust the alignment in case the bit position is not
8897 a multiple of the alignment of the inner object. */
8898 while (bitpos % alignment != 0)
8899 alignment >>= 1;
8901 if (GET_CODE (XEXP (op0, 0)) == REG)
8902 mark_reg_pointer (XEXP (op0, 0), alignment);
8904 MEM_IN_STRUCT_P (op0) = 1;
8905 MEM_VOLATILE_P (op0) |= volatilep;
8907 *palign = alignment;
8908 return op0;
8911 default:
8912 break;
8916 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8919 /* Return the tree node if ARG corresponds to a string constant or zero
8920 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8921 in bytes within the string that ARG is accessing. The type of the
8922 offset will be `sizetype'. */
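/* For example, given the tree for "abc" + 2 (an ADDR_EXPR of the
   STRING_CST plus an offset), this returns the STRING_CST node and
   sets *PTR_OFFSET to 2; anything else yields zero.  */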
8924 tree
8925 string_constant (arg, ptr_offset)
8926 tree arg;
8927 tree *ptr_offset;
8929 STRIP_NOPS (arg);
8931 if (TREE_CODE (arg) == ADDR_EXPR
8932 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8934 *ptr_offset = size_zero_node;
8935 return TREE_OPERAND (arg, 0);
8937 else if (TREE_CODE (arg) == PLUS_EXPR)
8939 tree arg0 = TREE_OPERAND (arg, 0);
8940 tree arg1 = TREE_OPERAND (arg, 1);
8942 STRIP_NOPS (arg0);
8943 STRIP_NOPS (arg1);
8945 if (TREE_CODE (arg0) == ADDR_EXPR
8946 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8948 *ptr_offset = convert (sizetype, arg1);
8949 return TREE_OPERAND (arg0, 0);
8951 else if (TREE_CODE (arg1) == ADDR_EXPR
8952 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8954 *ptr_offset = convert (sizetype, arg0);
8955 return TREE_OPERAND (arg1, 0);
8959 return 0;
8962 /* Expand code for a post- or pre- increment or decrement
8963 and return the RTX for the result.
8964 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8966 static rtx
8967 expand_increment (exp, post, ignore)
8968 register tree exp;
8969 int post, ignore;
8971 register rtx op0, op1;
8972 register rtx temp, value;
8973 register tree incremented = TREE_OPERAND (exp, 0);
8974 optab this_optab = add_optab;
8975 int icode;
8976 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8977 int op0_is_copy = 0;
8978 int single_insn = 0;
8979 /* 1 means we can't store into OP0 directly,
8980 because it is a subreg narrower than a word,
8981 and we don't dare clobber the rest of the word. */
8982 int bad_subreg = 0;
8984 /* Stabilize any component ref that might need to be
8985 evaluated more than once below. */
8986 if (!post
8987 || TREE_CODE (incremented) == BIT_FIELD_REF
8988 || (TREE_CODE (incremented) == COMPONENT_REF
8989 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8990 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8991 incremented = stabilize_reference (incremented);
8992 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8993 ones into save exprs so that they don't accidentally get evaluated
8994 more than once by the code below. */
8995 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8996 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8997 incremented = save_expr (incremented);
8999 /* Compute the operands as RTX.
9000 Note whether OP0 is the actual lvalue or a copy of it:
9001 I believe it is a copy iff it is a register or subreg
9002 and insns were generated in computing it. */
9004 temp = get_last_insn ();
9005 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9007 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9008 in place but instead must do sign- or zero-extension during assignment,
9009 so we copy it into a new register and let the code below use it as
9010 a copy.
9012 Note that we can safely modify this SUBREG since it is known not to be
9013 shared (it was made by the expand_expr call above). */
9015 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9017 if (post)
9018 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9019 else
9020 bad_subreg = 1;
9022 else if (GET_CODE (op0) == SUBREG
9023 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9025 /* We cannot increment this SUBREG in place. If we are
9026 post-incrementing, get a copy of the old value. Otherwise,
9027 just mark that we cannot increment in place. */
9028 if (post)
9029 op0 = copy_to_reg (op0);
9030 else
9031 bad_subreg = 1;
9034 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9035 && temp != get_last_insn ());
9036 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9037 EXPAND_MEMORY_USE_BAD);
9039 /* Decide whether incrementing or decrementing. */
9040 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9041 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9042 this_optab = sub_optab;
9044 /* Convert decrement by a constant into a negative increment. */
9045 if (this_optab == sub_optab
9046 && GET_CODE (op1) == CONST_INT)
9048 op1 = GEN_INT (-INTVAL (op1));
9049 this_optab = add_optab;
9052 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9053 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9055 /* For a preincrement, see if we can do this with a single instruction. */
9056 if (!post)
9058 icode = (int) this_optab->handlers[(int) mode].insn_code;
9059 if (icode != (int) CODE_FOR_nothing
9060 /* Make sure that OP0 is valid for operands 0 and 1
9061 of the insn we want to queue. */
9062 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9063 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9064 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9065 single_insn = 1;
9068 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9069 then we cannot just increment OP0. We must therefore contrive to
9070 increment the original value. Then, for postincrement, we can return
9071 OP0 since it is a copy of the old value. For preincrement, expand here
9072 unless we can do it with a single insn.
9074 Likewise if storing directly into OP0 would clobber high bits
9075 we need to preserve (bad_subreg). */
9076 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9078 /* This is the easiest way to increment the value wherever it is.
9079 Problems with multiple evaluation of INCREMENTED are prevented
9080 because either (1) it is a component_ref or preincrement,
9081 in which case it was stabilized above, or (2) it is an array_ref
9082 with constant index in an array in a register, which is
9083 safe to reevaluate. */
9084 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9085 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9086 ? MINUS_EXPR : PLUS_EXPR),
9087 TREE_TYPE (exp),
9088 incremented,
9089 TREE_OPERAND (exp, 1));
9091 while (TREE_CODE (incremented) == NOP_EXPR
9092 || TREE_CODE (incremented) == CONVERT_EXPR)
9094 newexp = convert (TREE_TYPE (incremented), newexp);
9095 incremented = TREE_OPERAND (incremented, 0);
9098 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9099 return post ? op0 : temp;
9102 if (post)
9104 /* We have a true reference to the value in OP0.
9105 If there is an insn to add or subtract in this mode, queue it.
9106 Queueing the increment insn avoids the register shuffling
9107 that often results if we must increment now and first save
9108 the old value for subsequent use. */
9110 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9111 op0 = stabilize (op0);
9112 #endif
9114 icode = (int) this_optab->handlers[(int) mode].insn_code;
9115 if (icode != (int) CODE_FOR_nothing
9116 /* Make sure that OP0 is valid for operands 0 and 1
9117 of the insn we want to queue. */
9118 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9119 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9121 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9122 op1 = force_reg (mode, op1);
9124 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9126 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9128 rtx addr = (general_operand (XEXP (op0, 0), mode)
9129 ? force_reg (Pmode, XEXP (op0, 0))
9130 : copy_to_reg (XEXP (op0, 0)));
9131 rtx temp, result;
9133 op0 = change_address (op0, VOIDmode, addr);
9134 temp = force_reg (GET_MODE (op0), op0);
9135 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9136 op1 = force_reg (mode, op1);
9138 /* The increment queue is LIFO, thus we have to `queue'
9139 the instructions in reverse order. */
9140 enqueue_insn (op0, gen_move_insn (op0, temp));
9141 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9142 return result;
9146 /* Preincrement, or we can't increment with one simple insn. */
9147 if (post)
9148 /* Save a copy of the value before inc or dec, to return it later. */
9149 temp = value = copy_to_reg (op0);
9150 else
9151 /* Arrange to return the incremented value. */
9152 /* Copy the rtx because expand_binop will protect from the queue,
9153 and the results of that would be invalid for us to return
9154 if our caller does emit_queue before using our result. */
9155 temp = copy_rtx (value = op0);
9157 /* Increment however we can. */
9158 op1 = expand_binop (mode, this_optab, value, op1,
9159 current_function_check_memory_usage ? NULL_RTX : op0,
9160 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9161 /* Make sure the value is stored into OP0. */
9162 if (op1 != op0)
9163 emit_move_insn (op0, op1);
9165 return temp;
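/* Illustrative sketch (editorial addition, not GCC code): the value flow
   expand_increment has to reproduce.  A postincrement copies the old value
   out before the store and returns the copy; a preincrement returns the
   updated value itself.  */

static int sim_postinc (int *lvalue)
{
  int old = *lvalue;      /* the copy_to_reg of OP0 taken above */
  *lvalue = old + 1;      /* the queued or emitted add */
  return old;             /* "return post ? op0 : temp" hands back the copy */
}

static int sim_preinc (int *lvalue)
{
  *lvalue = *lvalue + 1;
  return *lvalue;         /* the incremented value is the result */
}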
9168 /* At the start of a function, record that we have no previously-pushed
9169 arguments waiting to be popped. */
9171 void
9172 init_pending_stack_adjust ()
9174 pending_stack_adjust = 0;
9177 /* When exiting from function, if safe, clear out any pending stack adjust
9178 so the adjustment won't get done.
9180 Note, if the current function calls alloca, then it must have a
9181 frame pointer regardless of the value of flag_omit_frame_pointer. */
9183 void
9184 clear_pending_stack_adjust ()
9186 #ifdef EXIT_IGNORE_STACK
9187 if (optimize > 0
9188 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9189 && EXIT_IGNORE_STACK
9190 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9191 && ! flag_inline_functions)
9193 stack_pointer_delta -= pending_stack_adjust,
9194 pending_stack_adjust = 0;
9196 #endif
9199 /* Pop any previously-pushed arguments that have not been popped yet. */
9201 void
9202 do_pending_stack_adjust ()
9204 if (inhibit_defer_pop == 0)
9206 if (pending_stack_adjust != 0)
9207 adjust_stack (GEN_INT (pending_stack_adjust));
9208 pending_stack_adjust = 0;
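/* Illustrative sketch (editorial addition, not GCC code): why the pops are
   deferred.  Each call that pushed argument bytes only bumps a counter; a
   single stack-pointer adjustment at the next point that needs a clean stack
   replaces several back-to-back pops.  */

static int sim_pending_stack_adjust;

static void sim_after_call (int arg_bytes)
{
  sim_pending_stack_adjust += arg_bytes;   /* defer the pop */
}

static void sim_do_pending_stack_adjust (void)
{
  if (sim_pending_stack_adjust != 0)
    {
      /* emit one stack adjustment of sim_pending_stack_adjust bytes here */
      sim_pending_stack_adjust = 0;
    }
}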
9212 /* Expand conditional expressions. */
9214 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9215 LABEL is an rtx of code CODE_LABEL, in this function and all the
9216 functions here. */
9218 void
9219 jumpifnot (exp, label)
9220 tree exp;
9221 rtx label;
9223 do_jump (exp, label, NULL_RTX);
9226 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9228 void
9229 jumpif (exp, label)
9230 tree exp;
9231 rtx label;
9233 do_jump (exp, NULL_RTX, label);
9236 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9237 the result is zero, or IF_TRUE_LABEL if the result is one.
9238 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9239 meaning fall through in that case.
9241 do_jump always does any pending stack adjust except when it does not
9242 actually perform a jump. An example where there is no jump
9243 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9245 This function is responsible for optimizing cases such as
9246 &&, || and comparison operators in EXP. */
9248 void
9249 do_jump (exp, if_false_label, if_true_label)
9250 tree exp;
9251 rtx if_false_label, if_true_label;
9253 register enum tree_code code = TREE_CODE (exp);
9254 /* Some cases need to create a label to jump to
9255 in order to properly fall through.
9256 These cases set DROP_THROUGH_LABEL nonzero. */
9257 rtx drop_through_label = 0;
9258 rtx temp;
9259 int i;
9260 tree type;
9261 enum machine_mode mode;
9263 #ifdef MAX_INTEGER_COMPUTATION_MODE
9264 check_max_integer_computation_mode (exp);
9265 #endif
9267 emit_queue ();
9269 switch (code)
9271 case ERROR_MARK:
9272 break;
9274 case INTEGER_CST:
9275 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9276 if (temp)
9277 emit_jump (temp);
9278 break;
9280 #if 0
9281 /* This is not true with #pragma weak */
9282 case ADDR_EXPR:
9283 /* The address of something can never be zero. */
9284 if (if_true_label)
9285 emit_jump (if_true_label);
9286 break;
9287 #endif
9289 case NOP_EXPR:
9290 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9291 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9292 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9293 goto normal;
9294 case CONVERT_EXPR:
9295 /* If we are narrowing the operand, we have to do the compare in the
9296 narrower mode. */
9297 if ((TYPE_PRECISION (TREE_TYPE (exp))
9298 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9299 goto normal;
9300 case NON_LVALUE_EXPR:
9301 case REFERENCE_EXPR:
9302 case ABS_EXPR:
9303 case NEGATE_EXPR:
9304 case LROTATE_EXPR:
9305 case RROTATE_EXPR:
9306 /* These cannot change zero->non-zero or vice versa. */
9307 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9308 break;
9310 case WITH_RECORD_EXPR:
9311 /* Put the object on the placeholder list, recurse through our first
9312 operand, and pop the list. */
9313 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9314 placeholder_list);
9315 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9316 placeholder_list = TREE_CHAIN (placeholder_list);
9317 break;
9319 #if 0
9320 /* This is never less insns than evaluating the PLUS_EXPR followed by
9321 a test and can be longer if the test is eliminated. */
9322 case PLUS_EXPR:
9323 /* Reduce to minus. */
9324 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9325 TREE_OPERAND (exp, 0),
9326 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9327 TREE_OPERAND (exp, 1))));
9328 /* Process as MINUS. */
9329 #endif
9331 case MINUS_EXPR:
9332 /* Non-zero iff operands of minus differ. */
9333 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9334 TREE_OPERAND (exp, 0),
9335 TREE_OPERAND (exp, 1)),
9336 NE, NE, if_false_label, if_true_label);
9337 break;
9339 case BIT_AND_EXPR:
9340 /* If we are AND'ing with a small constant, do this comparison in the
9341 smallest type that fits. If the machine doesn't have comparisons
9342 that small, it will be converted back to the wider comparison.
9343 This helps if we are testing the sign bit of a narrower object.
9344 combine can't do this for us because it can't know whether a
9345 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9347 if (! SLOW_BYTE_ACCESS
9348 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9349 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9350 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9351 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9352 && (type = type_for_mode (mode, 1)) != 0
9353 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9354 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9355 != CODE_FOR_nothing))
9357 do_jump (convert (type, exp), if_false_label, if_true_label);
9358 break;
9360 goto normal;
9362 case TRUTH_NOT_EXPR:
9363 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9364 break;
9366 case TRUTH_ANDIF_EXPR:
9367 if (if_false_label == 0)
9368 if_false_label = drop_through_label = gen_label_rtx ();
9369 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9370 start_cleanup_deferral ();
9371 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9372 end_cleanup_deferral ();
9373 break;
9375 case TRUTH_ORIF_EXPR:
9376 if (if_true_label == 0)
9377 if_true_label = drop_through_label = gen_label_rtx ();
9378 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9379 start_cleanup_deferral ();
9380 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9381 end_cleanup_deferral ();
9382 break;
9384 case COMPOUND_EXPR:
9385 push_temp_slots ();
9386 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9387 preserve_temp_slots (NULL_RTX);
9388 free_temp_slots ();
9389 pop_temp_slots ();
9390 emit_queue ();
9391 do_pending_stack_adjust ();
9392 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9393 break;
9395 case COMPONENT_REF:
9396 case BIT_FIELD_REF:
9397 case ARRAY_REF:
9399 HOST_WIDE_INT bitsize, bitpos;
9400 int unsignedp;
9401 enum machine_mode mode;
9402 tree type;
9403 tree offset;
9404 int volatilep = 0;
9405 unsigned int alignment;
9407 /* Get description of this reference. We don't actually care
9408 about the underlying object here. */
9409 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9410 &unsignedp, &volatilep, &alignment);
9412 type = type_for_size (bitsize, unsignedp);
9413 if (! SLOW_BYTE_ACCESS
9414 && type != 0 && bitsize >= 0
9415 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9416 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9417 != CODE_FOR_nothing))
9419 do_jump (convert (type, exp), if_false_label, if_true_label);
9420 break;
9422 goto normal;
9425 case COND_EXPR:
9426 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9427 if (integer_onep (TREE_OPERAND (exp, 1))
9428 && integer_zerop (TREE_OPERAND (exp, 2)))
9429 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9431 else if (integer_zerop (TREE_OPERAND (exp, 1))
9432 && integer_onep (TREE_OPERAND (exp, 2)))
9433 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9435 else
9437 register rtx label1 = gen_label_rtx ();
9438 drop_through_label = gen_label_rtx ();
9440 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9442 start_cleanup_deferral ();
9443 /* Now the THEN-expression. */
9444 do_jump (TREE_OPERAND (exp, 1),
9445 if_false_label ? if_false_label : drop_through_label,
9446 if_true_label ? if_true_label : drop_through_label);
9447 /* In case the do_jump just above never jumps. */
9448 do_pending_stack_adjust ();
9449 emit_label (label1);
9451 /* Now the ELSE-expression. */
9452 do_jump (TREE_OPERAND (exp, 2),
9453 if_false_label ? if_false_label : drop_through_label,
9454 if_true_label ? if_true_label : drop_through_label);
9455 end_cleanup_deferral ();
9457 break;
9459 case EQ_EXPR:
9461 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9463 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9464 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9466 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9467 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9468 do_jump
9469 (fold
9470 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9471 fold (build (EQ_EXPR, TREE_TYPE (exp),
9472 fold (build1 (REALPART_EXPR,
9473 TREE_TYPE (inner_type),
9474 exp0)),
9475 fold (build1 (REALPART_EXPR,
9476 TREE_TYPE (inner_type),
9477 exp1)))),
9478 fold (build (EQ_EXPR, TREE_TYPE (exp),
9479 fold (build1 (IMAGPART_EXPR,
9480 TREE_TYPE (inner_type),
9481 exp0)),
9482 fold (build1 (IMAGPART_EXPR,
9483 TREE_TYPE (inner_type),
9484 exp1)))))),
9485 if_false_label, if_true_label);
9488 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9489 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9491 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9492 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9493 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9494 else
9495 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9496 break;
9499 case NE_EXPR:
9501 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9503 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9504 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9506 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9507 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9508 do_jump
9509 (fold
9510 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9511 fold (build (NE_EXPR, TREE_TYPE (exp),
9512 fold (build1 (REALPART_EXPR,
9513 TREE_TYPE (inner_type),
9514 exp0)),
9515 fold (build1 (REALPART_EXPR,
9516 TREE_TYPE (inner_type),
9517 exp1)))),
9518 fold (build (NE_EXPR, TREE_TYPE (exp),
9519 fold (build1 (IMAGPART_EXPR,
9520 TREE_TYPE (inner_type),
9521 exp0)),
9522 fold (build1 (IMAGPART_EXPR,
9523 TREE_TYPE (inner_type),
9524 exp1)))))),
9525 if_false_label, if_true_label);
9528 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9529 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9531 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9532 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9533 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9534 else
9535 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9536 break;
9539 case LT_EXPR:
9540 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9541 if (GET_MODE_CLASS (mode) == MODE_INT
9542 && ! can_compare_p (LT, mode, ccp_jump))
9543 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9544 else
9545 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9546 break;
9548 case LE_EXPR:
9549 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9550 if (GET_MODE_CLASS (mode) == MODE_INT
9551 && ! can_compare_p (LE, mode, ccp_jump))
9552 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9553 else
9554 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9555 break;
9557 case GT_EXPR:
9558 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9559 if (GET_MODE_CLASS (mode) == MODE_INT
9560 && ! can_compare_p (GT, mode, ccp_jump))
9561 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9562 else
9563 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9564 break;
9566 case GE_EXPR:
9567 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9568 if (GET_MODE_CLASS (mode) == MODE_INT
9569 && ! can_compare_p (GE, mode, ccp_jump))
9570 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9571 else
9572 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9573 break;
9575 case UNORDERED_EXPR:
9576 case ORDERED_EXPR:
9578 enum rtx_code cmp, rcmp;
9579 int do_rev;
9581 if (code == UNORDERED_EXPR)
9582 cmp = UNORDERED, rcmp = ORDERED;
9583 else
9584 cmp = ORDERED, rcmp = UNORDERED;
9585 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9587 do_rev = 0;
9588 if (! can_compare_p (cmp, mode, ccp_jump)
9589 && (can_compare_p (rcmp, mode, ccp_jump)
9590 /* If the target doesn't provide either UNORDERED or ORDERED
9591 comparisons, canonicalize on UNORDERED for the library. */
9592 || rcmp == UNORDERED))
9593 do_rev = 1;
9595 if (! do_rev)
9596 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9597 else
9598 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9600 break;
9603 enum rtx_code rcode1;
9604 enum tree_code tcode2;
9606 case UNLT_EXPR:
9607 rcode1 = UNLT;
9608 tcode2 = LT_EXPR;
9609 goto unordered_bcc;
9610 case UNLE_EXPR:
9611 rcode1 = UNLE;
9612 tcode2 = LE_EXPR;
9613 goto unordered_bcc;
9614 case UNGT_EXPR:
9615 rcode1 = UNGT;
9616 tcode2 = GT_EXPR;
9617 goto unordered_bcc;
9618 case UNGE_EXPR:
9619 rcode1 = UNGE;
9620 tcode2 = GE_EXPR;
9621 goto unordered_bcc;
9622 case UNEQ_EXPR:
9623 rcode1 = UNEQ;
9624 tcode2 = EQ_EXPR;
9625 goto unordered_bcc;
9627 unordered_bcc:
9628 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9629 if (can_compare_p (rcode1, mode, ccp_jump))
9630 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9631 if_true_label);
9632 else
9634 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9635 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9636 tree cmp0, cmp1;
9638 /* If the target doesn't support combined unordered
9639 compares, decompose into UNORDERED + comparison. */
9640 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9641 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9642 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9643 do_jump (exp, if_false_label, if_true_label);
9646 break;
9648 default:
9649 normal:
9650 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9651 #if 0
9652 /* This is not needed any more and causes poor code since it causes
9653 comparisons and tests from non-SI objects to have different code
9654 sequences. */
9655 /* Copy to register to avoid generating bad insns by cse
9656 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9657 if (!cse_not_expected && GET_CODE (temp) == MEM)
9658 temp = copy_to_reg (temp);
9659 #endif
9660 do_pending_stack_adjust ();
9661 /* Do any postincrements in the expression that was tested. */
9662 emit_queue ();
9664 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9666 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9667 if (target)
9668 emit_jump (target);
9670 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9671 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9672 /* Note swapping the labels gives us not-equal. */
9673 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9674 else if (GET_MODE (temp) != VOIDmode)
9675 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9676 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9677 GET_MODE (temp), NULL_RTX, 0,
9678 if_false_label, if_true_label);
9679 else
9680 abort ();
9683 if (drop_through_label)
9685 /* If do_jump produces code that might be jumped around,
9686 do any stack adjusts from that code, before the place
9687 where control merges in. */
9688 do_pending_stack_adjust ();
9689 emit_label (drop_through_label);
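/* Illustrative sketch (editorial addition, not GCC code): the control flow
   the TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR cases build, written with
   explicit gotos.  "a && b" branches to the false label as soon as the first
   operand fails; "a || b" branches to the true label as soon as the first
   operand holds.  */

int sim_andif (int a, int b)
{
  if (!a) goto if_false;     /* do_jump (op0, if_false_label, NULL_RTX) */
  if (!b) goto if_false;     /* do_jump (op1, if_false_label, if_true_label) */
  return 1;
 if_false:
  return 0;
}

int sim_orif (int a, int b)
{
  if (a) goto if_true;       /* do_jump (op0, NULL_RTX, if_true_label) */
  if (b) goto if_true;
  return 0;
 if_true:
  return 1;
}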
9693 /* Given a comparison expression EXP for values too wide to be compared
9694 with one insn, test the comparison and jump to the appropriate label.
9695 The code of EXP is ignored; we always test GT if SWAP is 0,
9696 and LT if SWAP is 1. */
9698 static void
9699 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9700 tree exp;
9701 int swap;
9702 rtx if_false_label, if_true_label;
9704 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9705 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9706 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9707 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9709 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9712 /* Compare OP0 with OP1, word at a time, in mode MODE.
9713 UNSIGNEDP says to do unsigned comparison.
9714 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9716 void
9717 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9718 enum machine_mode mode;
9719 int unsignedp;
9720 rtx op0, op1;
9721 rtx if_false_label, if_true_label;
9723 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9724 rtx drop_through_label = 0;
9725 int i;
9727 if (! if_true_label || ! if_false_label)
9728 drop_through_label = gen_label_rtx ();
9729 if (! if_true_label)
9730 if_true_label = drop_through_label;
9731 if (! if_false_label)
9732 if_false_label = drop_through_label;
9734 /* Compare a word at a time, high order first. */
9735 for (i = 0; i < nwords; i++)
9737 rtx op0_word, op1_word;
9739 if (WORDS_BIG_ENDIAN)
9741 op0_word = operand_subword_force (op0, i, mode);
9742 op1_word = operand_subword_force (op1, i, mode);
9744 else
9746 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9747 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9750 /* All but high-order word must be compared as unsigned. */
9751 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9752 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9753 NULL_RTX, if_true_label);
9755 /* Consider lower words only if these are equal. */
9756 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9757 NULL_RTX, 0, NULL_RTX, if_false_label);
9760 if (if_false_label)
9761 emit_jump (if_false_label);
9762 if (drop_through_label)
9763 emit_label (drop_through_label);
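/* Illustrative sketch (editorial addition, not GCC code): the word-at-a-time
   ">" test above, spelled out for a two-word unsigned value.  The high-order
   words decide the result unless they are equal, and every word below the
   highest is compared unsigned, just as the loop does.  */

#include <stdint.h>

int sim_gt_by_parts (uint32_t a_hi, uint32_t a_lo,
                     uint32_t b_hi, uint32_t b_lo)
{
  if (a_hi > b_hi)           /* GT on the high word: jump to the true label */
    return 1;
  if (a_hi != b_hi)          /* NE on the high word: jump to the false label */
    return 0;
  return a_lo > b_lo;        /* lower word consulted only when equal so far */
}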
9766 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9767 with one insn, test the comparison and jump to the appropriate label. */
9769 static void
9770 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9771 tree exp;
9772 rtx if_false_label, if_true_label;
9774 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9775 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9776 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9777 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9778 int i;
9779 rtx drop_through_label = 0;
9781 if (! if_false_label)
9782 drop_through_label = if_false_label = gen_label_rtx ();
9784 for (i = 0; i < nwords; i++)
9785 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9786 operand_subword_force (op1, i, mode),
9787 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9788 word_mode, NULL_RTX, 0, if_false_label,
9789 NULL_RTX);
9791 if (if_true_label)
9792 emit_jump (if_true_label);
9793 if (drop_through_label)
9794 emit_label (drop_through_label);
9797 /* Jump according to whether OP0 is 0.
9798 We assume that OP0 has an integer mode that is too wide
9799 for the available compare insns. */
9801 void
9802 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9803 rtx op0;
9804 rtx if_false_label, if_true_label;
9806 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9807 rtx part;
9808 int i;
9809 rtx drop_through_label = 0;
9811 /* The fastest way of doing this comparison on almost any machine is to
9812 "or" all the words and compare the result. If all have to be loaded
9813 from memory and this is a very wide item, it's possible this may
9814 be slower, but that's highly unlikely. */
9816 part = gen_reg_rtx (word_mode);
9817 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9818 for (i = 1; i < nwords && part != 0; i++)
9819 part = expand_binop (word_mode, ior_optab, part,
9820 operand_subword_force (op0, i, GET_MODE (op0)),
9821 part, 1, OPTAB_WIDEN);
9823 if (part != 0)
9825 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9826 NULL_RTX, 0, if_false_label, if_true_label);
9828 return;
9831 /* If we couldn't do the "or" simply, do this with a series of compares. */
9832 if (! if_false_label)
9833 drop_through_label = if_false_label = gen_label_rtx ();
9835 for (i = 0; i < nwords; i++)
9836 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9837 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9838 if_false_label, NULL_RTX);
9840 if (if_true_label)
9841 emit_jump (if_true_label);
9843 if (drop_through_label)
9844 emit_label (drop_through_label);
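/* Illustrative sketch (editorial addition, not GCC code): the "or all the
   words and compare once" zero test preferred above.  For a two-word value
   this is one OR and one compare instead of two compares and two branches;
   the per-word compare loop is only the fallback.  */

#include <stdint.h>

int sim_is_zero_by_parts (uint32_t hi, uint32_t lo)
{
  return (hi | lo) == 0;
}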
9847 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9848 (including code to compute the values to be compared)
9849 and set (CC0) according to the result.
9850 The decision as to signed or unsigned comparison must be made by the caller.
9852 We force a stack adjustment unless there are currently
9853 things pushed on the stack that aren't yet used.
9855 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9856 compared.
9858 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9859 size of MODE should be used. */
9861 rtx
9862 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9863 register rtx op0, op1;
9864 enum rtx_code code;
9865 int unsignedp;
9866 enum machine_mode mode;
9867 rtx size;
9868 unsigned int align;
9870 rtx tem;
9872 /* If one operand is constant, make it the second one. Only do this
9873 if the other operand is not constant as well. */
9875 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9876 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9878 tem = op0;
9879 op0 = op1;
9880 op1 = tem;
9881 code = swap_condition (code);
9884 if (flag_force_mem)
9886 op0 = force_not_mem (op0);
9887 op1 = force_not_mem (op1);
9890 do_pending_stack_adjust ();
9892 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9893 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9894 return tem;
9896 #if 0
9897 /* There's no need to do this now that combine.c can eliminate lots of
9898 sign extensions. This can be less efficient in certain cases on other
9899 machines. */
9901 /* If this is a signed equality comparison, we can do it as an
9902 unsigned comparison since zero-extension is cheaper than sign
9903 extension and comparisons with zero are done as unsigned. This is
9904 the case even on machines that can do fast sign extension, since
9905 zero-extension is easier to combine with other operations than
9906 sign-extension is. If we are comparing against a constant, we must
9907 convert it to what it would look like unsigned. */
9908 if ((code == EQ || code == NE) && ! unsignedp
9909 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9911 if (GET_CODE (op1) == CONST_INT
9912 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9913 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9914 unsignedp = 1;
9916 #endif
9918 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9920 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
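/* Illustrative sketch (editorial addition, not GCC code): moving a constant
   operand into the second position also requires reversing the direction of
   the comparison, which is what swap_condition does.  A tiny version for the
   signed integer codes used here:  */

typedef enum { SIM_EQ, SIM_NE, SIM_LT, SIM_LE, SIM_GT, SIM_GE } sim_code;

static sim_code sim_swap_condition (sim_code code)
{
  switch (code)
    {
    case SIM_LT: return SIM_GT;   /* "5 < x" becomes "x > 5" */
    case SIM_GT: return SIM_LT;
    case SIM_LE: return SIM_GE;
    case SIM_GE: return SIM_LE;
    default:     return code;     /* EQ and NE are symmetric */
    }
}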
9923 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9924 The decision as to signed or unsigned comparison must be made by the caller.
9926 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9927 compared.
9929 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9930 size of MODE should be used. */
9932 void
9933 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9934 if_false_label, if_true_label)
9935 register rtx op0, op1;
9936 enum rtx_code code;
9937 int unsignedp;
9938 enum machine_mode mode;
9939 rtx size;
9940 unsigned int align;
9941 rtx if_false_label, if_true_label;
9943 rtx tem;
9944 int dummy_true_label = 0;
9946 /* Reverse the comparison if that is safe and we want to jump if it is
9947 false. */
9948 if (! if_true_label && ! FLOAT_MODE_P (mode))
9950 if_true_label = if_false_label;
9951 if_false_label = 0;
9952 code = reverse_condition (code);
9955 /* If one operand is constant, make it the second one. Only do this
9956 if the other operand is not constant as well. */
9958 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9959 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9961 tem = op0;
9962 op0 = op1;
9963 op1 = tem;
9964 code = swap_condition (code);
9967 if (flag_force_mem)
9969 op0 = force_not_mem (op0);
9970 op1 = force_not_mem (op1);
9973 do_pending_stack_adjust ();
9975 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9976 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9978 if (tem == const_true_rtx)
9980 if (if_true_label)
9981 emit_jump (if_true_label);
9983 else
9985 if (if_false_label)
9986 emit_jump (if_false_label);
9988 return;
9991 #if 0
9992 /* There's no need to do this now that combine.c can eliminate lots of
9993 sign extensions. This can be less efficient in certain cases on other
9994 machines. */
9996 /* If this is a signed equality comparison, we can do it as an
9997 unsigned comparison since zero-extension is cheaper than sign
9998 extension and comparisons with zero are done as unsigned. This is
9999 the case even on machines that can do fast sign extension, since
10000 zero-extension is easier to combine with other operations than
10001 sign-extension is. If we are comparing against a constant, we must
10002 convert it to what it would look like unsigned. */
10003 if ((code == EQ || code == NE) && ! unsignedp
10004 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10006 if (GET_CODE (op1) == CONST_INT
10007 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10008 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10009 unsignedp = 1;
10011 #endif
10013 if (! if_true_label)
10015 dummy_true_label = 1;
10016 if_true_label = gen_label_rtx ();
10019 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10020 if_true_label);
10022 if (if_false_label)
10023 emit_jump (if_false_label);
10024 if (dummy_true_label)
10025 emit_label (if_true_label);
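/* Illustrative sketch (editorial addition, not GCC code): why the reversal
   at the top of the function is restricted to non-float modes.  For
   integers, jumping when "a < b" is false is the same as jumping when
   "a >= b"; with IEEE floats a NaN operand makes both comparisons false, so
   the rewrite would change behaviour.  */

int sim_reverse_ok_int (int a, int b)
{
  return (!(a < b)) == (a >= b);    /* always 1 for integers */
}

int sim_reverse_ok_double (double a, double b)
{
  return (!(a < b)) == (a >= b);    /* 0 when either operand is NaN */
}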
10028 /* Generate code for a comparison expression EXP (including code to compute
10029 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10030 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10031 generated code will drop through.
10032 SIGNED_CODE should be the rtx operation for this comparison for
10033 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10035 We force a stack adjustment unless there are currently
10036 things pushed on the stack that aren't yet used. */
10038 static void
10039 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10040 if_true_label)
10041 register tree exp;
10042 enum rtx_code signed_code, unsigned_code;
10043 rtx if_false_label, if_true_label;
10045 unsigned int align0, align1;
10046 register rtx op0, op1;
10047 register tree type;
10048 register enum machine_mode mode;
10049 int unsignedp;
10050 enum rtx_code code;
10052 /* Don't crash if the comparison was erroneous. */
10053 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10054 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10055 return;
10057 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10058 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10059 return;
10061 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10062 mode = TYPE_MODE (type);
10063 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10064 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10065 || (GET_MODE_BITSIZE (mode)
10066 > GET_MODE_BITSIZE (TREE_TYPE (TREE_OPERAND (exp, 1))))))
10068 /* op0 might have been replaced by promoted constant, in which
10069 case the type of second argument should be used. */
10070 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10071 mode = TYPE_MODE (type);
10073 unsignedp = TREE_UNSIGNED (type);
10074 code = unsignedp ? unsigned_code : signed_code;
10076 #ifdef HAVE_canonicalize_funcptr_for_compare
10077 /* If function pointers need to be "canonicalized" before they can
10078 be reliably compared, then canonicalize them. */
10079 if (HAVE_canonicalize_funcptr_for_compare
10080 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10081 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10082 == FUNCTION_TYPE))
10084 rtx new_op0 = gen_reg_rtx (mode);
10086 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10087 op0 = new_op0;
10090 if (HAVE_canonicalize_funcptr_for_compare
10091 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10092 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10093 == FUNCTION_TYPE))
10095 rtx new_op1 = gen_reg_rtx (mode);
10097 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10098 op1 = new_op1;
10100 #endif
10102 /* Do any postincrements in the expression that was tested. */
10103 emit_queue ();
10105 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10106 ((mode == BLKmode)
10107 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10108 MIN (align0, align1),
10109 if_false_label, if_true_label);
10112 /* Generate code to calculate EXP using a store-flag instruction
10113 and return an rtx for the result. EXP is either a comparison
10114 or a TRUTH_NOT_EXPR whose operand is a comparison.
10116 If TARGET is nonzero, store the result there if convenient.
10118 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10119 cheap.
10121 Return zero if there is no suitable set-flag instruction
10122 available on this machine.
10124 Once expand_expr has been called on the arguments of the comparison,
10125 we are committed to doing the store flag, since it is not safe to
10126 re-evaluate the expression. We emit the store-flag insn by calling
10127 emit_store_flag, but only expand the arguments if we have a reason
10128 to believe that emit_store_flag will be successful. If we think that
10129 it will, but it isn't, we have to simulate the store-flag with a
10130 set/jump/set sequence. */
10132 static rtx
10133 do_store_flag (exp, target, mode, only_cheap)
10134 tree exp;
10135 rtx target;
10136 enum machine_mode mode;
10137 int only_cheap;
10139 enum rtx_code code;
10140 tree arg0, arg1, type;
10141 tree tem;
10142 enum machine_mode operand_mode;
10143 int invert = 0;
10144 int unsignedp;
10145 rtx op0, op1;
10146 enum insn_code icode;
10147 rtx subtarget = target;
10148 rtx result, label;
10150 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10151 result at the end. We can't simply invert the test since it would
10152 have already been inverted if it were valid. This case occurs for
10153 some floating-point comparisons. */
10155 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10156 invert = 1, exp = TREE_OPERAND (exp, 0);
10158 arg0 = TREE_OPERAND (exp, 0);
10159 arg1 = TREE_OPERAND (exp, 1);
10161 /* Don't crash if the comparison was erroneous. */
10162 if (arg0 == error_mark_node || arg1 == error_mark_node)
10163 return const0_rtx;
10165 type = TREE_TYPE (arg0);
10166 operand_mode = TYPE_MODE (type);
10167 unsignedp = TREE_UNSIGNED (type);
10169 /* We won't bother with BLKmode store-flag operations because it would mean
10170 passing a lot of information to emit_store_flag. */
10171 if (operand_mode == BLKmode)
10172 return 0;
10174 /* We won't bother with store-flag operations involving function pointers
10175 when function pointers must be canonicalized before comparisons. */
10176 #ifdef HAVE_canonicalize_funcptr_for_compare
10177 if (HAVE_canonicalize_funcptr_for_compare
10178 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10179 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10180 == FUNCTION_TYPE))
10181 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10182 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10183 == FUNCTION_TYPE))))
10184 return 0;
10185 #endif
10187 STRIP_NOPS (arg0);
10188 STRIP_NOPS (arg1);
10190 /* Get the rtx comparison code to use. We know that EXP is a comparison
10191 operation of some type. Some comparisons against 1 and -1 can be
10192 converted to comparisons with zero. Do so here so that the tests
10193 below will be aware that we have a comparison with zero. These
10194 tests will not catch constants in the first operand, but constants
10195 are rarely passed as the first operand. */
10197 switch (TREE_CODE (exp))
10199 case EQ_EXPR:
10200 code = EQ;
10201 break;
10202 case NE_EXPR:
10203 code = NE;
10204 break;
10205 case LT_EXPR:
10206 if (integer_onep (arg1))
10207 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10208 else
10209 code = unsignedp ? LTU : LT;
10210 break;
10211 case LE_EXPR:
10212 if (! unsignedp && integer_all_onesp (arg1))
10213 arg1 = integer_zero_node, code = LT;
10214 else
10215 code = unsignedp ? LEU : LE;
10216 break;
10217 case GT_EXPR:
10218 if (! unsignedp && integer_all_onesp (arg1))
10219 arg1 = integer_zero_node, code = GE;
10220 else
10221 code = unsignedp ? GTU : GT;
10222 break;
10223 case GE_EXPR:
10224 if (integer_onep (arg1))
10225 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10226 else
10227 code = unsignedp ? GEU : GE;
10228 break;
10230 case UNORDERED_EXPR:
10231 code = UNORDERED;
10232 break;
10233 case ORDERED_EXPR:
10234 code = ORDERED;
10235 break;
10236 case UNLT_EXPR:
10237 code = UNLT;
10238 break;
10239 case UNLE_EXPR:
10240 code = UNLE;
10241 break;
10242 case UNGT_EXPR:
10243 code = UNGT;
10244 break;
10245 case UNGE_EXPR:
10246 code = UNGE;
10247 break;
10248 case UNEQ_EXPR:
10249 code = UNEQ;
10250 break;
10252 default:
10253 abort ();
10256 /* Put a constant second. */
10257 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10259 tem = arg0; arg0 = arg1; arg1 = tem;
10260 code = swap_condition (code);
10263 /* If this is an equality or inequality test of a single bit, we can
10264 do this by shifting the bit being tested to the low-order bit and
10265 masking the result with the constant 1. If the condition was EQ,
10266 we xor it with 1. This does not require an scc insn and is faster
10267 than an scc insn even if we have it. */
10269 if ((code == NE || code == EQ)
10270 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10271 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10273 tree inner = TREE_OPERAND (arg0, 0);
10274 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10275 int ops_unsignedp;
10277 /* If INNER is a right shift of a constant and it plus BITNUM does
10278 not overflow, adjust BITNUM and INNER. */
10280 if (TREE_CODE (inner) == RSHIFT_EXPR
10281 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10282 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10283 && bitnum < TYPE_PRECISION (type)
10284 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10285 bitnum - TYPE_PRECISION (type)))
10287 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10288 inner = TREE_OPERAND (inner, 0);
10291 /* If we are going to be able to omit the AND below, we must do our
10292 operations as unsigned. If we must use the AND, we have a choice.
10293 Normally unsigned is faster, but for some machines signed is. */
10294 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10295 #ifdef LOAD_EXTEND_OP
10296 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10297 #else
10298 : 1
10299 #endif
10300 );
10302 if (! get_subtarget (subtarget)
10303 || GET_MODE (subtarget) != operand_mode
10304 || ! safe_from_p (subtarget, inner, 1))
10305 subtarget = 0;
10307 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10309 if (bitnum != 0)
10310 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10311 size_int (bitnum), subtarget, ops_unsignedp);
10313 if (GET_MODE (op0) != mode)
10314 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10316 if ((code == EQ && ! invert) || (code == NE && invert))
10317 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10318 ops_unsignedp, OPTAB_LIB_WIDEN);
10320 /* Put the AND last so it can combine with more things. */
10321 if (bitnum != TYPE_PRECISION (type) - 1)
10322 op0 = expand_and (op0, const1_rtx, subtarget);
10324 return op0;
10327 /* Now see if we are likely to be able to do this. Return if not. */
10328 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10329 return 0;
10331 icode = setcc_gen_code[(int) code];
10332 if (icode == CODE_FOR_nothing
10333 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10335 /* We can only do this if it is one of the special cases that
10336 can be handled without an scc insn. */
10337 if ((code == LT && integer_zerop (arg1))
10338 || (! only_cheap && code == GE && integer_zerop (arg1)))
10340 else if (BRANCH_COST >= 0
10341 && ! only_cheap && (code == NE || code == EQ)
10342 && TREE_CODE (type) != REAL_TYPE
10343 && ((abs_optab->handlers[(int) operand_mode].insn_code
10344 != CODE_FOR_nothing)
10345 || (ffs_optab->handlers[(int) operand_mode].insn_code
10346 != CODE_FOR_nothing)))
10348 else
10349 return 0;
10352 if (! get_subtarget (target)
10353 || GET_MODE (subtarget) != operand_mode
10354 || ! safe_from_p (subtarget, arg1, 1))
10355 subtarget = 0;
10357 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10358 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10360 if (target == 0)
10361 target = gen_reg_rtx (mode);
10363 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10364 because, if the emit_store_flag does anything it will succeed and
10365 OP0 and OP1 will not be used subsequently. */
10367 result = emit_store_flag (target, code,
10368 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10369 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10370 operand_mode, unsignedp, 1);
10372 if (result)
10374 if (invert)
10375 result = expand_binop (mode, xor_optab, result, const1_rtx,
10376 result, 0, OPTAB_LIB_WIDEN);
10377 return result;
10380 /* If this failed, we have to do this with set/compare/jump/set code. */
10381 if (GET_CODE (target) != REG
10382 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10383 target = gen_reg_rtx (GET_MODE (target));
10385 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10386 result = compare_from_rtx (op0, op1, code, unsignedp,
10387 operand_mode, NULL_RTX, 0);
10388 if (GET_CODE (result) == CONST_INT)
10389 return (((result == const0_rtx && ! invert)
10390 || (result != const0_rtx && invert))
10391 ? const0_rtx : const1_rtx);
10393 label = gen_label_rtx ();
10394 if (bcc_gen_fctn[(int) code] == 0)
10395 abort ();
10397 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10398 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10399 emit_label (label);
10401 return target;
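/* Illustrative sketch (editorial addition, not GCC code): the single-bit
   store-flag shortcut above, written as source-level rewrites.  Testing bit
   BITNUM for "!= 0" becomes a shift and a mask; the "== 0" form also XORs
   with 1, and the final AND is kept last so it can combine with later
   operations.  */

#include <stdint.h>

int sim_bit_test_ne (uint32_t x, int bitnum)
{
  /* (x & (1 << bitnum)) != 0   -->   (x >> bitnum) & 1  */
  return (x >> bitnum) & 1;
}

int sim_bit_test_eq (uint32_t x, int bitnum)
{
  /* (x & (1 << bitnum)) == 0   -->   ((x >> bitnum) ^ 1) & 1  */
  return ((x >> bitnum) ^ 1) & 1;
}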
10404 /* Generate a tablejump instruction (used for switch statements). */
10406 #ifdef HAVE_tablejump
10408 /* INDEX is the value being switched on, with the lowest value
10409 in the table already subtracted.
10410 MODE is its expected mode (needed if INDEX is constant).
10411 RANGE is the length of the jump table.
10412 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10414 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10415 index value is out of range. */
10417 void
10418 do_tablejump (index, mode, range, table_label, default_label)
10419 rtx index, range, table_label, default_label;
10420 enum machine_mode mode;
10422 register rtx temp, vector;
10424 /* Do an unsigned comparison (in the proper mode) between the index
10425 expression and the value which represents the length of the range.
10426 Since we just finished subtracting the lower bound of the range
10427 from the index expression, this comparison allows us to simultaneously
10428 check that the original index expression value is both greater than
10429 or equal to the minimum value of the range and less than or equal to
10430 the maximum value of the range. */
10432 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10433 0, default_label);
10435 /* If index is in range, it must fit in Pmode.
10436 Convert to Pmode so we can index with it. */
10437 if (mode != Pmode)
10438 index = convert_to_mode (Pmode, index, 1);
10440 /* Don't let a MEM slip thru, because then INDEX that comes
10441 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10442 and break_out_memory_refs will go to work on it and mess it up. */
10443 #ifdef PIC_CASE_VECTOR_ADDRESS
10444 if (flag_pic && GET_CODE (index) != REG)
10445 index = copy_to_mode_reg (Pmode, index);
10446 #endif
10448 /* If flag_force_addr were to affect this address
10449 it could interfere with the tricky assumptions made
10450 about addresses that contain label-refs,
10451 which may be valid only very near the tablejump itself. */
10452 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10453 GET_MODE_SIZE, because this indicates how large insns are. The other
10454 uses should all be Pmode, because they are addresses. This code
10455 could fail if addresses and insns are not the same size. */
10456 index = gen_rtx_PLUS (Pmode,
10457 gen_rtx_MULT (Pmode, index,
10458 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10459 gen_rtx_LABEL_REF (Pmode, table_label));
10460 #ifdef PIC_CASE_VECTOR_ADDRESS
10461 if (flag_pic)
10462 index = PIC_CASE_VECTOR_ADDRESS (index);
10463 else
10464 #endif
10465 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10466 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10467 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10468 RTX_UNCHANGING_P (vector) = 1;
10469 convert_move (temp, vector, 0);
10471 emit_jump_insn (gen_tablejump (temp, table_label));
10473 /* If we are generating PIC code or if the table is PC-relative, the
10474 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10475 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10476 emit_barrier ();
10479 #endif /* HAVE_tablejump */
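/* Illustrative sketch (editorial addition, not GCC code): the range check
   do_tablejump relies on.  After subtracting the lowest case value, one
   unsigned comparison against the table length catches values below the
   minimum (they wrap around to huge unsigned numbers) as well as values
   above the maximum, and the in-range index selects the table entry.  */

#include <stdio.h>

static const char *sim_switch (int i)      /* models "switch (i)" with cases 3..5 */
{
  static const char *const table[] = { "three", "four", "five" };
  unsigned int index = (unsigned int) (i - 3);   /* subtract the lower bound */

  if (index > 2u)                /* GTU against the range: out of bounds */
    return "default";
  return table[index];           /* the indexed dispatch */
}

int main (void)
{
  /* Prints "default four default".  */
  printf ("%s %s %s\n", sim_switch (2), sim_switch (4), sim_switch (9));
  return 0;
}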