1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
3 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35 #include "insn-config.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "output.h"
41 #include "typeclass.h"
42 #include "defaults.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "intl.h"
46 #include "tm_p.h"
48 #ifndef ACCUMULATE_OUTGOING_ARGS
49 #define ACCUMULATE_OUTGOING_ARGS 0
50 #endif
52 /* Supply a default definition for PUSH_ARGS. */
53 #ifndef PUSH_ARGS
54 #ifdef PUSH_ROUNDING
55 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
56 #else
57 #define PUSH_ARGS 0
58 #endif
59 #endif
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
67 #ifdef PUSH_ROUNDING
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
73 #endif
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
83 /* Assume that case vectors are not pc-relative. */
84 #ifndef CASE_VECTOR_PC_RELATIVE
85 #define CASE_VECTOR_PC_RELATIVE 0
86 #endif
88 /* Hook called by safe_from_p for language-specific tree codes. It is
89 up to the language front-end to install a hook if it has any such
90 codes that safe_from_p needs to know about. Since safe_from_p will
91 recursively explore the TREE_OPERANDs of an expression, this hook
92 should not reexamine those pieces. This routine may recursively
93 call safe_from_p; it should always pass `0' as the TOP_P
94 parameter. */
95 int (*lang_safe_from_p) PARAMS ((rtx, tree));
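/* Illustration only (not part of the original source): a front end that
   defines a language-specific code, say FOO_WRAPPER_EXPR wrapping a single
   operand, might install a hook along the lines sketched below.  The names
   are hypothetical; the point is that the hook examines only its own codes
   and recurses through safe_from_p with TOP_P always 0, as the comment
   above requires.

     static int
     foo_safe_from_p (x, exp)
          rtx x;
          tree exp;
     {
       if (TREE_CODE (exp) == FOO_WRAPPER_EXPR)
         return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
       /* Codes we know nothing about are treated as safe here; a real
          front end would decide for itself.  */
       return 1;
     }

   and, during front-end initialization:

     lang_safe_from_p = foo_safe_from_p;  */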
97 /* If this is nonzero, we do not bother generating VOLATILE
98 around volatile memory references, and we are willing to
99 output indirect addresses. If cse is to follow, we reject
100 indirect addresses so a useful potential cse is generated;
101 if it is used only once, instruction combination will produce
102 the same indirect address eventually. */
103 int cse_not_expected;
105 /* Don't check memory usage, since code is being emitted to check memory
106 usage. Used when current_function_check_memory_usage is true, to avoid
107 infinite recursion. */
108 static int in_check_memory_usage;
110 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
111 static tree placeholder_list = 0;
113 /* This structure is used by move_by_pieces to describe the move to
114 be performed. */
115 struct move_by_pieces
116 {
117 rtx to;
118 rtx to_addr;
119 int autinc_to;
120 int explicit_inc_to;
121 rtx from;
122 rtx from_addr;
123 int autinc_from;
124 int explicit_inc_from;
125 unsigned HOST_WIDE_INT len;
126 HOST_WIDE_INT offset;
127 int reverse;
128 };
130 /* This structure is used by clear_by_pieces to describe the clear to
131 be performed. */
133 struct clear_by_pieces
134 {
135 rtx to;
136 rtx to_addr;
137 int autinc_to;
138 int explicit_inc_to;
139 unsigned HOST_WIDE_INT len;
140 HOST_WIDE_INT offset;
141 int reverse;
142 };
144 extern struct obstack permanent_obstack;
146 static rtx get_push_address PARAMS ((int));
148 static rtx enqueue_insn PARAMS ((rtx, rtx));
149 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
150 PARAMS ((unsigned HOST_WIDE_INT,
151 unsigned int));
152 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
153 struct move_by_pieces *));
154 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
155 unsigned int));
156 static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
157 enum machine_mode,
158 struct clear_by_pieces *));
159 static rtx get_subtarget PARAMS ((rtx));
160 static int is_zeros_p PARAMS ((tree));
161 static int mostly_zeros_p PARAMS ((tree));
162 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
163 HOST_WIDE_INT, enum machine_mode,
164 tree, tree, unsigned int, int,
165 int));
166 static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
167 HOST_WIDE_INT));
168 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
169 HOST_WIDE_INT, enum machine_mode,
170 tree, enum machine_mode, int,
171 unsigned int, HOST_WIDE_INT, int));
172 static enum memory_use_mode
173 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
174 static tree save_noncopied_parts PARAMS ((tree, tree));
175 static tree init_noncopied_parts PARAMS ((tree, tree));
176 static int fixed_type_p PARAMS ((tree));
177 static rtx var_rtx PARAMS ((tree));
178 static int readonly_fields_p PARAMS ((tree));
179 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
180 static rtx expand_increment PARAMS ((tree, int, int));
181 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
182 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
183 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
184 rtx, rtx));
185 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
187 /* Record for each mode whether we can move a register directly to or
188 from an object of that mode in memory. If we can't, we won't try
189 to use that mode directly when accessing a field of that mode. */
191 static char direct_load[NUM_MACHINE_MODES];
192 static char direct_store[NUM_MACHINE_MODES];
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
197 #ifndef MOVE_RATIO
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
199 #define MOVE_RATIO 2
200 #else
201 /* If we are optimizing for space (-Os), cut down the default move ratio. */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 #endif
204 #endif
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
211 #endif
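/* Illustration only: on a hypothetical 32-bit target with no movstr
   patterns, MOVE_RATIO defaults to 15 (or 3 under -Os).  A 16-byte,
   word-aligned copy costs four SImode moves, so move_by_pieces_ninsns
   returns 4 and MOVE_BY_PIECES_P is true: the copy is expanded inline.
   Under -Os the test 4 < 3 fails, and emit_block_move falls back to a
   block-move pattern or a library call instead.  */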
213 /* This array records the insn_code of insns to perform block moves. */
214 enum insn_code movstr_optab[NUM_MACHINE_MODES];
216 /* This array records the insn_code of insns to perform block clears. */
217 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
219 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
221 #ifndef SLOW_UNALIGNED_ACCESS
222 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
223 #endif
225 /* This is run once per compilation to set up which modes can be used
226 directly in memory and to initialize the block move optab. */
228 void
229 init_expr_once ()
231 rtx insn, pat;
232 enum machine_mode mode;
233 int num_clobbers;
234 rtx mem, mem1;
236 start_sequence ();
238 /* Try indexing by frame ptr and try by stack ptr.
239 It is known that on the Convex the stack ptr isn't a valid index.
240 With luck, one or the other is valid on any machine. */
241 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
242 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
244 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
245 pat = PATTERN (insn);
247 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
248 mode = (enum machine_mode) ((int) mode + 1))
250 int regno;
251 rtx reg;
253 direct_load[(int) mode] = direct_store[(int) mode] = 0;
254 PUT_MODE (mem, mode);
255 PUT_MODE (mem1, mode);
257 /* See if there is some register that can be used in this mode and
258 directly loaded or stored from memory. */
260 if (mode != VOIDmode && mode != BLKmode)
261 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
262 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
263 regno++)
265 if (! HARD_REGNO_MODE_OK (regno, mode))
266 continue;
268 reg = gen_rtx_REG (mode, regno);
270 SET_SRC (pat) = mem;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
275 SET_SRC (pat) = mem1;
276 SET_DEST (pat) = reg;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_load[(int) mode] = 1;
280 SET_SRC (pat) = reg;
281 SET_DEST (pat) = mem;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
285 SET_SRC (pat) = reg;
286 SET_DEST (pat) = mem1;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_store[(int) mode] = 1;
292 end_sequence ();
295 /* This is run at the start of compiling a function. */
297 void
298 init_expr ()
300 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
302 pending_chain = 0;
303 pending_stack_adjust = 0;
304 stack_pointer_delta = 0;
305 inhibit_defer_pop = 0;
306 saveregs_value = 0;
307 apply_args_value = 0;
308 forced_labels = 0;
311 void
312 mark_expr_status (p)
313 struct expr_status *p;
315 if (p == NULL)
316 return;
318 ggc_mark_rtx (p->x_saveregs_value);
319 ggc_mark_rtx (p->x_apply_args_value);
320 ggc_mark_rtx (p->x_forced_labels);
323 void
324 free_expr_status (f)
325 struct function *f;
327 free (f->expr);
328 f->expr = NULL;
331 /* Small sanity check that the queue is empty at the end of a function. */
333 void
334 finish_expr_for_function ()
336 if (pending_chain)
337 abort ();
340 /* Manage the queue of increment instructions to be output
341 for POSTINCREMENT_EXPR expressions, etc. */
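/* Illustration only: when expanding something like `a[i++]', the expander
   queues the increment of `i' with enqueue_insn, keeps using the QUEUED
   rtx it gets back (filtered through protect_from_queue) wherever the
   pre-increment value of `i' is needed, and finally calls emit_queue once
   the whole containing expression has been expanded, at which point the
   deferred increment insns are emitted.  */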
343 /* Queue up to increment (or change) VAR later. BODY says how:
344 BODY should be the same thing you would pass to emit_insn
345 to increment right away. It will go to emit_insn later on.
347 The value is a QUEUED expression to be used in place of VAR
348 where you want to guarantee the pre-incrementation value of VAR. */
350 static rtx
351 enqueue_insn (var, body)
352 rtx var, body;
354 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
355 body, pending_chain);
356 return pending_chain;
359 /* Use protect_from_queue to convert a QUEUED expression
360 into something that you can put immediately into an instruction.
361 If the queued incrementation has not happened yet,
362 protect_from_queue returns the variable itself.
363 If the incrementation has happened, protect_from_queue returns a temp
364 that contains a copy of the old value of the variable.
366 Any time an rtx which might possibly be a QUEUED is to be put
367 into an instruction, it must be passed through protect_from_queue first.
368 QUEUED expressions are not meaningful in instructions.
370 Do not pass a value through protect_from_queue and then hold
371 on to it for a while before putting it in an instruction!
372 If the queue is flushed in between, incorrect code will result. */
374 rtx
375 protect_from_queue (x, modify)
376 register rtx x;
377 int modify;
379 register RTX_CODE code = GET_CODE (x);
381 #if 0 /* A QUEUED can hang around after the queue is forced out. */
382 /* Shortcut for most common case. */
383 if (pending_chain == 0)
384 return x;
385 #endif
387 if (code != QUEUED)
389 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
390 use of autoincrement. Make a copy of the contents of the memory
391 location rather than a copy of the address, but not if the value is
392 of mode BLKmode. Don't modify X in place since it might be
393 shared. */
394 if (code == MEM && GET_MODE (x) != BLKmode
395 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
397 register rtx y = XEXP (x, 0);
398 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
400 MEM_COPY_ATTRIBUTES (new, x);
402 if (QUEUED_INSN (y))
404 register rtx temp = gen_reg_rtx (GET_MODE (new));
405 emit_insn_before (gen_move_insn (temp, new),
406 QUEUED_INSN (y));
407 return temp;
409 return new;
411 /* Otherwise, recursively protect the subexpressions of all
412 the kinds of rtx's that can contain a QUEUED. */
413 if (code == MEM)
415 rtx tem = protect_from_queue (XEXP (x, 0), 0);
416 if (tem != XEXP (x, 0))
418 x = copy_rtx (x);
419 XEXP (x, 0) = tem;
422 else if (code == PLUS || code == MULT)
424 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
425 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
426 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
428 x = copy_rtx (x);
429 XEXP (x, 0) = new0;
430 XEXP (x, 1) = new1;
433 return x;
435 /* If the increment has not happened, use the variable itself. */
436 if (QUEUED_INSN (x) == 0)
437 return QUEUED_VAR (x);
438 /* If the increment has happened and a pre-increment copy exists,
439 use that copy. */
440 if (QUEUED_COPY (x) != 0)
441 return QUEUED_COPY (x);
442 /* The increment has happened but we haven't set up a pre-increment copy.
443 Set one up now, and use it. */
444 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
445 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
446 QUEUED_INSN (x));
447 return QUEUED_COPY (x);
450 /* Return nonzero if X contains a QUEUED expression:
451 if it contains anything that will be altered by a queued increment.
452 We handle only combinations of MEM, PLUS, MINUS and MULT operators
453 since memory addresses generally contain only those. */
455 int
456 queued_subexp_p (x)
457 rtx x;
459 register enum rtx_code code = GET_CODE (x);
460 switch (code)
462 case QUEUED:
463 return 1;
464 case MEM:
465 return queued_subexp_p (XEXP (x, 0));
466 case MULT:
467 case PLUS:
468 case MINUS:
469 return (queued_subexp_p (XEXP (x, 0))
470 || queued_subexp_p (XEXP (x, 1)));
471 default:
472 return 0;
476 /* Perform all the pending incrementations. */
478 void
479 emit_queue ()
481 register rtx p;
482 while ((p = pending_chain))
484 rtx body = QUEUED_BODY (p);
486 if (GET_CODE (body) == SEQUENCE)
488 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
489 emit_insn (QUEUED_BODY (p));
491 else
492 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
493 pending_chain = QUEUED_NEXT (p);
497 /* Copy data from FROM to TO, where the machine modes are not the same.
498 Both modes may be integer, or both may be floating.
499 UNSIGNEDP should be nonzero if FROM is an unsigned type.
500 This causes zero-extension instead of sign-extension. */
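/* Illustration only: given a QImode pseudo BYTE and an SImode pseudo WORD,
   `convert_move (word, byte, 1)' widens with zero-extension while
   `convert_move (word, byte, 0)' widens with sign-extension; going the
   other way, `convert_move (byte, word, 0)' truncates.  Which insns are
   emitted depends on what the target provides (extendqisi2,
   zero_extendqisi2, a truncate pattern, or a libcall for the floating
   cases handled below).  */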
502 void
503 convert_move (to, from, unsignedp)
504 register rtx to, from;
505 int unsignedp;
507 enum machine_mode to_mode = GET_MODE (to);
508 enum machine_mode from_mode = GET_MODE (from);
509 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
510 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
511 enum insn_code code;
512 rtx libcall;
514 /* rtx code for making an equivalent value. */
515 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
517 to = protect_from_queue (to, 1);
518 from = protect_from_queue (from, 0);
520 if (to_real != from_real)
521 abort ();
523 /* If FROM is a SUBREG that indicates that we have already done at least
524 the required extension, strip it. We don't handle such SUBREGs as
525 TO here. */
527 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
528 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
529 >= GET_MODE_SIZE (to_mode))
530 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
531 from = gen_lowpart (to_mode, from), from_mode = to_mode;
533 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
534 abort ();
536 if (to_mode == from_mode
537 || (from_mode == VOIDmode && CONSTANT_P (from)))
539 emit_move_insn (to, from);
540 return;
543 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
545 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
546 abort ();
548 if (VECTOR_MODE_P (to_mode))
549 from = gen_rtx_SUBREG (to_mode, from, 0);
550 else
551 to = gen_rtx_SUBREG (from_mode, to, 0);
553 emit_move_insn (to, from);
554 return;
557 if (to_real != from_real)
558 abort ();
560 if (to_real)
562 rtx value, insns;
564 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
566 /* Try converting directly if the insn is supported. */
567 if ((code = can_extend_p (to_mode, from_mode, 0))
568 != CODE_FOR_nothing)
570 emit_unop_insn (code, to, from, UNKNOWN);
571 return;
575 #ifdef HAVE_trunchfqf2
576 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
578 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
579 return;
581 #endif
582 #ifdef HAVE_trunctqfqf2
583 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
585 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
586 return;
588 #endif
589 #ifdef HAVE_truncsfqf2
590 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
592 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
593 return;
595 #endif
596 #ifdef HAVE_truncdfqf2
597 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
599 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
600 return;
602 #endif
603 #ifdef HAVE_truncxfqf2
604 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
606 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
607 return;
609 #endif
610 #ifdef HAVE_trunctfqf2
611 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
613 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
614 return;
616 #endif
618 #ifdef HAVE_trunctqfhf2
619 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
621 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
622 return;
624 #endif
625 #ifdef HAVE_truncsfhf2
626 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
628 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
629 return;
631 #endif
632 #ifdef HAVE_truncdfhf2
633 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
635 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
636 return;
638 #endif
639 #ifdef HAVE_truncxfhf2
640 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
642 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
643 return;
645 #endif
646 #ifdef HAVE_trunctfhf2
647 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
649 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
650 return;
652 #endif
654 #ifdef HAVE_truncsftqf2
655 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
657 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
658 return;
660 #endif
661 #ifdef HAVE_truncdftqf2
662 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
664 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
665 return;
667 #endif
668 #ifdef HAVE_truncxftqf2
669 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
671 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
672 return;
674 #endif
675 #ifdef HAVE_trunctftqf2
676 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
678 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
679 return;
681 #endif
683 #ifdef HAVE_truncdfsf2
684 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
686 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
687 return;
689 #endif
690 #ifdef HAVE_truncxfsf2
691 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
693 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
694 return;
696 #endif
697 #ifdef HAVE_trunctfsf2
698 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
700 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
701 return;
703 #endif
704 #ifdef HAVE_truncxfdf2
705 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
707 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
708 return;
710 #endif
711 #ifdef HAVE_trunctfdf2
712 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
714 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
715 return;
717 #endif
719 libcall = (rtx) 0;
720 switch (from_mode)
722 case SFmode:
723 switch (to_mode)
725 case DFmode:
726 libcall = extendsfdf2_libfunc;
727 break;
729 case XFmode:
730 libcall = extendsfxf2_libfunc;
731 break;
733 case TFmode:
734 libcall = extendsftf2_libfunc;
735 break;
737 default:
738 break;
740 break;
742 case DFmode:
743 switch (to_mode)
745 case SFmode:
746 libcall = truncdfsf2_libfunc;
747 break;
749 case XFmode:
750 libcall = extenddfxf2_libfunc;
751 break;
753 case TFmode:
754 libcall = extenddftf2_libfunc;
755 break;
757 default:
758 break;
760 break;
762 case XFmode:
763 switch (to_mode)
765 case SFmode:
766 libcall = truncxfsf2_libfunc;
767 break;
769 case DFmode:
770 libcall = truncxfdf2_libfunc;
771 break;
773 default:
774 break;
776 break;
778 case TFmode:
779 switch (to_mode)
781 case SFmode:
782 libcall = trunctfsf2_libfunc;
783 break;
785 case DFmode:
786 libcall = trunctfdf2_libfunc;
787 break;
789 default:
790 break;
792 break;
794 default:
795 break;
798 if (libcall == (rtx) 0)
799 /* This conversion is not implemented yet. */
800 abort ();
802 start_sequence ();
803 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
804 1, from, from_mode);
805 insns = get_insns ();
806 end_sequence ();
807 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
808 from));
809 return;
812 /* Now both modes are integers. */
814 /* Handle expanding beyond a word. */
815 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
816 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
818 rtx insns;
819 rtx lowpart;
820 rtx fill_value;
821 rtx lowfrom;
822 int i;
823 enum machine_mode lowpart_mode;
824 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
826 /* Try converting directly if the insn is supported. */
827 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
828 != CODE_FOR_nothing)
830 /* If FROM is a SUBREG, put it into a register. Do this
831 so that we always generate the same set of insns for
832 better cse'ing; if an intermediate assignment occurred,
833 we won't be doing the operation directly on the SUBREG. */
834 if (optimize > 0 && GET_CODE (from) == SUBREG)
835 from = force_reg (from_mode, from);
836 emit_unop_insn (code, to, from, equiv_code);
837 return;
839 /* Next, try converting via full word. */
840 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
841 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
842 != CODE_FOR_nothing))
844 if (GET_CODE (to) == REG)
845 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
846 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
847 emit_unop_insn (code, to,
848 gen_lowpart (word_mode, to), equiv_code);
849 return;
852 /* No special multiword conversion insn; do it by hand. */
853 start_sequence ();
855 /* Since we will turn this into a no conflict block, we must ensure
856 that the source does not overlap the target. */
858 if (reg_overlap_mentioned_p (to, from))
859 from = force_reg (from_mode, from);
861 /* Get a copy of FROM widened to a word, if necessary. */
862 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
863 lowpart_mode = word_mode;
864 else
865 lowpart_mode = from_mode;
867 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
869 lowpart = gen_lowpart (lowpart_mode, to);
870 emit_move_insn (lowpart, lowfrom);
872 /* Compute the value to put in each remaining word. */
873 if (unsignedp)
874 fill_value = const0_rtx;
875 else
877 #ifdef HAVE_slt
878 if (HAVE_slt
879 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
880 && STORE_FLAG_VALUE == -1)
882 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
883 lowpart_mode, 0, 0);
884 fill_value = gen_reg_rtx (word_mode);
885 emit_insn (gen_slt (fill_value));
887 else
888 #endif
890 fill_value
891 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
892 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
893 NULL_RTX, 0);
894 fill_value = convert_to_mode (word_mode, fill_value, 1);
898 /* Fill the remaining words. */
899 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
901 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
902 rtx subword = operand_subword (to, index, 1, to_mode);
904 if (subword == 0)
905 abort ();
907 if (fill_value != subword)
908 emit_move_insn (subword, fill_value);
911 insns = get_insns ();
912 end_sequence ();
914 emit_no_conflict_block (insns, to, from, NULL_RTX,
915 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
916 return;
919 /* Truncating multi-word to a word or less. */
920 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
921 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
923 if (!((GET_CODE (from) == MEM
924 && ! MEM_VOLATILE_P (from)
925 && direct_load[(int) to_mode]
926 && ! mode_dependent_address_p (XEXP (from, 0)))
927 || GET_CODE (from) == REG
928 || GET_CODE (from) == SUBREG))
929 from = force_reg (from_mode, from);
930 convert_move (to, gen_lowpart (word_mode, from), 0);
931 return;
934 /* Handle pointer conversion. */ /* SPEE 900220. */
935 if (to_mode == PQImode)
937 if (from_mode != QImode)
938 from = convert_to_mode (QImode, from, unsignedp);
940 #ifdef HAVE_truncqipqi2
941 if (HAVE_truncqipqi2)
943 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
944 return;
946 #endif /* HAVE_truncqipqi2 */
947 abort ();
950 if (from_mode == PQImode)
952 if (to_mode != QImode)
954 from = convert_to_mode (QImode, from, unsignedp);
955 from_mode = QImode;
957 else
959 #ifdef HAVE_extendpqiqi2
960 if (HAVE_extendpqiqi2)
962 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
963 return;
965 #endif /* HAVE_extendpqiqi2 */
966 abort ();
970 if (to_mode == PSImode)
972 if (from_mode != SImode)
973 from = convert_to_mode (SImode, from, unsignedp);
975 #ifdef HAVE_truncsipsi2
976 if (HAVE_truncsipsi2)
978 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
979 return;
981 #endif /* HAVE_truncsipsi2 */
982 abort ();
985 if (from_mode == PSImode)
987 if (to_mode != SImode)
989 from = convert_to_mode (SImode, from, unsignedp);
990 from_mode = SImode;
992 else
994 #ifdef HAVE_extendpsisi2
995 if (! unsignedp && HAVE_extendpsisi2)
997 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
998 return;
1000 #endif /* HAVE_extendpsisi2 */
1001 #ifdef HAVE_zero_extendpsisi2
1002 if (unsignedp && HAVE_zero_extendpsisi2)
1004 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1005 return;
1007 #endif /* HAVE_zero_extendpsisi2 */
1008 abort ();
1012 if (to_mode == PDImode)
1014 if (from_mode != DImode)
1015 from = convert_to_mode (DImode, from, unsignedp);
1017 #ifdef HAVE_truncdipdi2
1018 if (HAVE_truncdipdi2)
1020 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1021 return;
1023 #endif /* HAVE_truncdipdi2 */
1024 abort ();
1027 if (from_mode == PDImode)
1029 if (to_mode != DImode)
1031 from = convert_to_mode (DImode, from, unsignedp);
1032 from_mode = DImode;
1034 else
1036 #ifdef HAVE_extendpdidi2
1037 if (HAVE_extendpdidi2)
1039 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1040 return;
1042 #endif /* HAVE_extendpdidi2 */
1043 abort ();
1047 /* Now follow all the conversions between integers
1048 no more than a word long. */
1050 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1051 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1052 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1053 GET_MODE_BITSIZE (from_mode)))
1055 if (!((GET_CODE (from) == MEM
1056 && ! MEM_VOLATILE_P (from)
1057 && direct_load[(int) to_mode]
1058 && ! mode_dependent_address_p (XEXP (from, 0)))
1059 || GET_CODE (from) == REG
1060 || GET_CODE (from) == SUBREG))
1061 from = force_reg (from_mode, from);
1062 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1063 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1064 from = copy_to_reg (from);
1065 emit_move_insn (to, gen_lowpart (to_mode, from));
1066 return;
1069 /* Handle extension. */
1070 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1072 /* Convert directly if that works. */
1073 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1074 != CODE_FOR_nothing)
1076 emit_unop_insn (code, to, from, equiv_code);
1077 return;
1079 else
1081 enum machine_mode intermediate;
1082 rtx tmp;
1083 tree shift_amount;
1085 /* Search for a mode to convert via. */
1086 for (intermediate = from_mode; intermediate != VOIDmode;
1087 intermediate = GET_MODE_WIDER_MODE (intermediate))
1088 if (((can_extend_p (to_mode, intermediate, unsignedp)
1089 != CODE_FOR_nothing)
1090 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1091 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1092 GET_MODE_BITSIZE (intermediate))))
1093 && (can_extend_p (intermediate, from_mode, unsignedp)
1094 != CODE_FOR_nothing))
1096 convert_move (to, convert_to_mode (intermediate, from,
1097 unsignedp), unsignedp);
1098 return;
1101 /* No suitable intermediate mode.
1102 Generate what we need with shifts. */
1103 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1104 - GET_MODE_BITSIZE (from_mode), 0);
1105 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1106 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1107 to, unsignedp);
1108 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1109 to, unsignedp);
1110 if (tmp != to)
1111 emit_move_insn (to, tmp);
1112 return;
1116 /* Support special truncate insns for certain modes. */
1118 if (from_mode == DImode && to_mode == SImode)
1120 #ifdef HAVE_truncdisi2
1121 if (HAVE_truncdisi2)
1123 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1124 return;
1126 #endif
1127 convert_move (to, force_reg (from_mode, from), unsignedp);
1128 return;
1131 if (from_mode == DImode && to_mode == HImode)
1133 #ifdef HAVE_truncdihi2
1134 if (HAVE_truncdihi2)
1136 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1137 return;
1139 #endif
1140 convert_move (to, force_reg (from_mode, from), unsignedp);
1141 return;
1144 if (from_mode == DImode && to_mode == QImode)
1146 #ifdef HAVE_truncdiqi2
1147 if (HAVE_truncdiqi2)
1149 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1150 return;
1152 #endif
1153 convert_move (to, force_reg (from_mode, from), unsignedp);
1154 return;
1157 if (from_mode == SImode && to_mode == HImode)
1159 #ifdef HAVE_truncsihi2
1160 if (HAVE_truncsihi2)
1162 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1163 return;
1165 #endif
1166 convert_move (to, force_reg (from_mode, from), unsignedp);
1167 return;
1170 if (from_mode == SImode && to_mode == QImode)
1172 #ifdef HAVE_truncsiqi2
1173 if (HAVE_truncsiqi2)
1175 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1176 return;
1178 #endif
1179 convert_move (to, force_reg (from_mode, from), unsignedp);
1180 return;
1183 if (from_mode == HImode && to_mode == QImode)
1185 #ifdef HAVE_trunchiqi2
1186 if (HAVE_trunchiqi2)
1188 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1189 return;
1191 #endif
1192 convert_move (to, force_reg (from_mode, from), unsignedp);
1193 return;
1196 if (from_mode == TImode && to_mode == DImode)
1198 #ifdef HAVE_trunctidi2
1199 if (HAVE_trunctidi2)
1201 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1202 return;
1204 #endif
1205 convert_move (to, force_reg (from_mode, from), unsignedp);
1206 return;
1209 if (from_mode == TImode && to_mode == SImode)
1211 #ifdef HAVE_trunctisi2
1212 if (HAVE_trunctisi2)
1214 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1215 return;
1217 #endif
1218 convert_move (to, force_reg (from_mode, from), unsignedp);
1219 return;
1222 if (from_mode == TImode && to_mode == HImode)
1224 #ifdef HAVE_trunctihi2
1225 if (HAVE_trunctihi2)
1227 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1228 return;
1230 #endif
1231 convert_move (to, force_reg (from_mode, from), unsignedp);
1232 return;
1235 if (from_mode == TImode && to_mode == QImode)
1237 #ifdef HAVE_trunctiqi2
1238 if (HAVE_trunctiqi2)
1240 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1241 return;
1243 #endif
1244 convert_move (to, force_reg (from_mode, from), unsignedp);
1245 return;
1248 /* Handle truncation of volatile memrefs, and so on;
1249 the things that couldn't be truncated directly,
1250 and for which there was no special instruction. */
1251 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1253 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1254 emit_move_insn (to, temp);
1255 return;
1258 /* Mode combination is not recognized. */
1259 abort ();
1262 /* Return an rtx for a value that would result
1263 from converting X to mode MODE.
1264 Both X and MODE may be floating, or both integer.
1265 UNSIGNEDP is nonzero if X is an unsigned value.
1266 This can be done by referring to a part of X in place
1267 or by copying to a new temporary with conversion.
1269 This function *must not* call protect_from_queue
1270 except when putting X into an insn (in which case convert_move does it). */
1272 rtx
1273 convert_to_mode (mode, x, unsignedp)
1274 enum machine_mode mode;
1275 rtx x;
1276 int unsignedp;
1278 return convert_modes (mode, VOIDmode, x, unsignedp);
1281 /* Return an rtx for a value that would result
1282 from converting X from mode OLDMODE to mode MODE.
1283 Both modes may be floating, or both integer.
1284 UNSIGNEDP is nonzero if X is an unsigned value.
1286 This can be done by referring to a part of X in place
1287 or by copying to a new temporary with conversion.
1289 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1291 This function *must not* call protect_from_queue
1292 except when putting X into an insn (in which case convert_move does it). */
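/* Illustration only: `convert_modes (SImode, QImode, GEN_INT (0xff), 0)'
   treats the QImode constant as signed and yields (const_int -1), while
   with UNSIGNEDP nonzero it yields (const_int 255); for a non-constant
   operand the function falls back to a fresh pseudo and convert_move.  */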
1294 rtx
1295 convert_modes (mode, oldmode, x, unsignedp)
1296 enum machine_mode mode, oldmode;
1297 rtx x;
1298 int unsignedp;
1300 register rtx temp;
1302 /* If FROM is a SUBREG that indicates that we have already done at least
1303 the required extension, strip it. */
1305 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1306 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1307 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1308 x = gen_lowpart (mode, x);
1310 if (GET_MODE (x) != VOIDmode)
1311 oldmode = GET_MODE (x);
1313 if (mode == oldmode)
1314 return x;
1316 /* There is one case that we must handle specially: If we are converting
1317 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1318 we are to interpret the constant as unsigned, gen_lowpart will do
1319 the wrong thing if the constant appears negative. What we want to do is
1320 make the high-order word of the constant zero, not all ones. */
1322 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1323 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1324 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1326 HOST_WIDE_INT val = INTVAL (x);
1328 if (oldmode != VOIDmode
1329 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1331 int width = GET_MODE_BITSIZE (oldmode);
1333 /* We need to zero extend VAL. */
1334 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1337 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1340 /* We can do this with a gen_lowpart if both desired and current modes
1341 are integer, and this is either a constant integer, a register, or a
1342 non-volatile MEM. Except for the constant case where MODE is no
1343 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1345 if ((GET_CODE (x) == CONST_INT
1346 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1347 || (GET_MODE_CLASS (mode) == MODE_INT
1348 && GET_MODE_CLASS (oldmode) == MODE_INT
1349 && (GET_CODE (x) == CONST_DOUBLE
1350 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1351 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1352 && direct_load[(int) mode])
1353 || (GET_CODE (x) == REG
1354 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1355 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1357 /* ?? If we don't know OLDMODE, we have to assume here that
1358 X does not need sign- or zero-extension. This may not be
1359 the case, but it's the best we can do. */
1360 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1361 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1363 HOST_WIDE_INT val = INTVAL (x);
1364 int width = GET_MODE_BITSIZE (oldmode);
1366 /* We must sign or zero-extend in this case. Start by
1367 zero-extending, then sign extend if we need to. */
1368 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1369 if (! unsignedp
1370 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1371 val |= (HOST_WIDE_INT) (-1) << width;
1373 return GEN_INT (val);
1376 return gen_lowpart (mode, x);
1379 temp = gen_reg_rtx (mode);
1380 convert_move (temp, x, unsignedp);
1381 return temp;
1384 /* This macro is used to determine the largest unit size that
1385 move_by_pieces can use. */
1387 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1388 move efficiently, as opposed to MOVE_MAX which is the maximum
1389 number of bytes we can move with a single instruction. */
1391 #ifndef MOVE_MAX_PIECES
1392 #define MOVE_MAX_PIECES MOVE_MAX
1393 #endif
1395 /* Generate several move instructions to copy LEN bytes
1396 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1397 The caller must pass FROM and TO
1398 through protect_from_queue before calling.
1399 ALIGN is maximum alignment we can assume. */
1401 void
1402 move_by_pieces (to, from, len, align)
1403 rtx to, from;
1404 unsigned HOST_WIDE_INT len;
1405 unsigned int align;
1407 struct move_by_pieces data;
1408 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1409 unsigned int max_size = MOVE_MAX_PIECES + 1;
1410 enum machine_mode mode = VOIDmode, tmode;
1411 enum insn_code icode;
1413 data.offset = 0;
1414 data.to_addr = to_addr;
1415 data.from_addr = from_addr;
1416 data.to = to;
1417 data.from = from;
1418 data.autinc_to
1419 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1420 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1421 data.autinc_from
1422 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1423 || GET_CODE (from_addr) == POST_INC
1424 || GET_CODE (from_addr) == POST_DEC);
1426 data.explicit_inc_from = 0;
1427 data.explicit_inc_to = 0;
1428 data.reverse
1429 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1430 if (data.reverse) data.offset = len;
1431 data.len = len;
1433 /* If copying requires more than two move insns,
1434 copy addresses to registers (to make displacements shorter)
1435 and use post-increment if available. */
1436 if (!(data.autinc_from && data.autinc_to)
1437 && move_by_pieces_ninsns (len, align) > 2)
1439 /* Find the mode of the largest move... */
1440 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1441 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1442 if (GET_MODE_SIZE (tmode) < max_size)
1443 mode = tmode;
1445 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1447 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1448 data.autinc_from = 1;
1449 data.explicit_inc_from = -1;
1451 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1453 data.from_addr = copy_addr_to_reg (from_addr);
1454 data.autinc_from = 1;
1455 data.explicit_inc_from = 1;
1457 if (!data.autinc_from && CONSTANT_P (from_addr))
1458 data.from_addr = copy_addr_to_reg (from_addr);
1459 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1461 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1462 data.autinc_to = 1;
1463 data.explicit_inc_to = -1;
1465 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1467 data.to_addr = copy_addr_to_reg (to_addr);
1468 data.autinc_to = 1;
1469 data.explicit_inc_to = 1;
1471 if (!data.autinc_to && CONSTANT_P (to_addr))
1472 data.to_addr = copy_addr_to_reg (to_addr);
1475 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1476 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1477 align = MOVE_MAX * BITS_PER_UNIT;
1479 /* First move what we can in the largest integer mode, then go to
1480 successively smaller modes. */
1482 while (max_size > 1)
1484 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1485 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1486 if (GET_MODE_SIZE (tmode) < max_size)
1487 mode = tmode;
1489 if (mode == VOIDmode)
1490 break;
1492 icode = mov_optab->handlers[(int) mode].insn_code;
1493 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1494 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1496 max_size = GET_MODE_SIZE (mode);
1499 /* The code above should have handled everything. */
1500 if (data.len > 0)
1501 abort ();
1504 /* Return number of insns required to move L bytes by pieces.
1505 ALIGN (in bits) is the maximum alignment we can assume. */
1507 static unsigned HOST_WIDE_INT
1508 move_by_pieces_ninsns (l, align)
1509 unsigned HOST_WIDE_INT l;
1510 unsigned int align;
1512 unsigned HOST_WIDE_INT n_insns = 0;
1513 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1515 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1516 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1517 align = MOVE_MAX * BITS_PER_UNIT;
1519 while (max_size > 1)
1521 enum machine_mode mode = VOIDmode, tmode;
1522 enum insn_code icode;
1524 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1525 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1526 if (GET_MODE_SIZE (tmode) < max_size)
1527 mode = tmode;
1529 if (mode == VOIDmode)
1530 break;
1532 icode = mov_optab->handlers[(int) mode].insn_code;
1533 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1534 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1536 max_size = GET_MODE_SIZE (mode);
1539 if (l)
1540 abort ();
1541 return n_insns;
1544 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1545 with move instructions for mode MODE. GENFUN is the gen_... function
1546 to make a move insn for that mode. DATA has all the other info. */
1548 static void
1549 move_by_pieces_1 (genfun, mode, data)
1550 rtx (*genfun) PARAMS ((rtx, ...));
1551 enum machine_mode mode;
1552 struct move_by_pieces *data;
1554 unsigned int size = GET_MODE_SIZE (mode);
1555 rtx to1, from1;
1557 while (data->len >= size)
1559 if (data->reverse)
1560 data->offset -= size;
1562 if (data->autinc_to)
1564 to1 = gen_rtx_MEM (mode, data->to_addr);
1565 MEM_COPY_ATTRIBUTES (to1, data->to);
1567 else
1568 to1 = change_address (data->to, mode,
1569 plus_constant (data->to_addr, data->offset));
1571 if (data->autinc_from)
1573 from1 = gen_rtx_MEM (mode, data->from_addr);
1574 MEM_COPY_ATTRIBUTES (from1, data->from);
1576 else
1577 from1 = change_address (data->from, mode,
1578 plus_constant (data->from_addr, data->offset));
1580 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1581 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1582 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1583 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1585 emit_insn ((*genfun) (to1, from1));
1587 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1588 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1589 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1590 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1592 if (! data->reverse)
1593 data->offset += size;
1595 data->len -= size;
1599 /* Emit code to move a block Y to a block X.
1600 This may be done with string-move instructions,
1601 with multiple scalar move instructions, or with a library call.
1603 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1604 with mode BLKmode.
1605 SIZE is an rtx that says how long they are.
1606 ALIGN is the maximum alignment we can assume they have.
1608 Return the address of the new block, if memcpy is called and returns it,
1609 0 otherwise. */
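/* Illustration only (hypothetical variables): a typical caller expanding a
   structure assignment has two BLKmode MEMs and a constant size, e.g.

     emit_block_move (target, source,
                      GEN_INT (int_size_in_bytes (TREE_TYPE (exp))),
                      TYPE_ALIGN (TREE_TYPE (exp)));

   For a small constant size this expands inline via move_by_pieces;
   otherwise it tries the movstrM patterns and finally falls back to
   memcpy (or bcopy when TARGET_MEM_FUNCTIONS is not defined).  */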
1611 rtx
1612 emit_block_move (x, y, size, align)
1613 rtx x, y;
1614 rtx size;
1615 unsigned int align;
1617 rtx retval = 0;
1618 #ifdef TARGET_MEM_FUNCTIONS
1619 static tree fn;
1620 tree call_expr, arg_list;
1621 #endif
1623 if (GET_MODE (x) != BLKmode)
1624 abort ();
1626 if (GET_MODE (y) != BLKmode)
1627 abort ();
1629 x = protect_from_queue (x, 1);
1630 y = protect_from_queue (y, 0);
1631 size = protect_from_queue (size, 0);
1633 if (GET_CODE (x) != MEM)
1634 abort ();
1635 if (GET_CODE (y) != MEM)
1636 abort ();
1637 if (size == 0)
1638 abort ();
1640 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1641 move_by_pieces (x, y, INTVAL (size), align);
1642 else
1644 /* Try the most limited insn first, because there's no point
1645 including more than one in the machine description unless
1646 the more limited one has some advantage. */
1648 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1649 enum machine_mode mode;
1651 /* Since this is a move insn, we don't care about volatility. */
1652 volatile_ok = 1;
1654 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1655 mode = GET_MODE_WIDER_MODE (mode))
1657 enum insn_code code = movstr_optab[(int) mode];
1658 insn_operand_predicate_fn pred;
1660 if (code != CODE_FOR_nothing
1661 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1662 here because if SIZE is less than the mode mask, as it is
1663 returned by the macro, it will definitely be less than the
1664 actual mode mask. */
1665 && ((GET_CODE (size) == CONST_INT
1666 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1667 <= (GET_MODE_MASK (mode) >> 1)))
1668 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1669 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1670 || (*pred) (x, BLKmode))
1671 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1672 || (*pred) (y, BLKmode))
1673 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1674 || (*pred) (opalign, VOIDmode)))
1676 rtx op2;
1677 rtx last = get_last_insn ();
1678 rtx pat;
1680 op2 = convert_to_mode (mode, size, 1);
1681 pred = insn_data[(int) code].operand[2].predicate;
1682 if (pred != 0 && ! (*pred) (op2, mode))
1683 op2 = copy_to_mode_reg (mode, op2);
1685 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1686 if (pat)
1688 emit_insn (pat);
1689 volatile_ok = 0;
1690 return 0;
1692 else
1693 delete_insns_since (last);
1697 volatile_ok = 0;
1699 /* X, Y, or SIZE may have been passed through protect_from_queue.
1701 It is unsafe to save the value generated by protect_from_queue
1702 and reuse it later. Consider what happens if emit_queue is
1703 called before the return value from protect_from_queue is used.
1705 Expansion of the CALL_EXPR below will call emit_queue before
1706 we are finished emitting RTL for argument setup. So if we are
1707 not careful we could get the wrong value for an argument.
1709 To avoid this problem we go ahead and emit code to copy X, Y &
1710 SIZE into new pseudos. We can then place those new pseudos
1711 into an RTL_EXPR and use them later, even after a call to
1712 emit_queue.
1714 Note this is not strictly needed for library calls since they
1715 do not call emit_queue before loading their arguments. However,
1716 we may need to have library calls call emit_queue in the future
1717 since failing to do so could cause problems for targets which
1718 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1719 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1720 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1722 #ifdef TARGET_MEM_FUNCTIONS
1723 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1724 #else
1725 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1726 TREE_UNSIGNED (integer_type_node));
1727 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1728 #endif
1730 #ifdef TARGET_MEM_FUNCTIONS
1731 /* It is incorrect to use the libcall calling conventions to call
1732 memcpy in this context.
1734 This could be a user call to memcpy and the user may wish to
1735 examine the return value from memcpy.
1737 For targets where libcalls and normal calls have different conventions
1738 for returning pointers, we could end up generating incorrect code.
1740 So instead of using a libcall sequence we build up a suitable
1741 CALL_EXPR and expand the call in the normal fashion. */
1742 if (fn == NULL_TREE)
1744 tree fntype;
1746 /* This was copied from except.c, I don't know if all this is
1747 necessary in this context or not. */
1748 fn = get_identifier ("memcpy");
1749 fntype = build_pointer_type (void_type_node);
1750 fntype = build_function_type (fntype, NULL_TREE);
1751 fn = build_decl (FUNCTION_DECL, fn, fntype);
1752 ggc_add_tree_root (&fn, 1);
1753 DECL_EXTERNAL (fn) = 1;
1754 TREE_PUBLIC (fn) = 1;
1755 DECL_ARTIFICIAL (fn) = 1;
1756 make_decl_rtl (fn, NULL_PTR, 1);
1757 assemble_external (fn);
1760 /* We need to make an argument list for the function call.
1762 memcpy has three arguments, the first two are void * addresses and
1763 the last is a size_t byte count for the copy. */
1764 arg_list
1765 = build_tree_list (NULL_TREE,
1766 make_tree (build_pointer_type (void_type_node), x));
1767 TREE_CHAIN (arg_list)
1768 = build_tree_list (NULL_TREE,
1769 make_tree (build_pointer_type (void_type_node), y));
1770 TREE_CHAIN (TREE_CHAIN (arg_list))
1771 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1772 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1774 /* Now we have to build up the CALL_EXPR itself. */
1775 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1776 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1777 call_expr, arg_list, NULL_TREE);
1778 TREE_SIDE_EFFECTS (call_expr) = 1;
1780 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1781 #else
1782 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1783 VOIDmode, 3, y, Pmode, x, Pmode,
1784 convert_to_mode (TYPE_MODE (integer_type_node), size,
1785 TREE_UNSIGNED (integer_type_node)),
1786 TYPE_MODE (integer_type_node));
1787 #endif
1790 return retval;
1793 /* Copy all or part of a value X into registers starting at REGNO.
1794 The number of registers to be filled is NREGS. */
1796 void
1797 move_block_to_reg (regno, x, nregs, mode)
1798 int regno;
1799 rtx x;
1800 int nregs;
1801 enum machine_mode mode;
1803 int i;
1804 #ifdef HAVE_load_multiple
1805 rtx pat;
1806 rtx last;
1807 #endif
1809 if (nregs == 0)
1810 return;
1812 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1813 x = validize_mem (force_const_mem (mode, x));
1815 /* See if the machine can do this with a load multiple insn. */
1816 #ifdef HAVE_load_multiple
1817 if (HAVE_load_multiple)
1819 last = get_last_insn ();
1820 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1821 GEN_INT (nregs));
1822 if (pat)
1824 emit_insn (pat);
1825 return;
1827 else
1828 delete_insns_since (last);
1830 #endif
1832 for (i = 0; i < nregs; i++)
1833 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1834 operand_subword_force (x, i, mode));
1837 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1838 The number of registers to be filled is NREGS. SIZE indicates the number
1839 of bytes in the object X. */
1841 void
1842 move_block_from_reg (regno, x, nregs, size)
1843 int regno;
1844 rtx x;
1845 int nregs;
1846 int size;
1848 int i;
1849 #ifdef HAVE_store_multiple
1850 rtx pat;
1851 rtx last;
1852 #endif
1853 enum machine_mode mode;
1855 /* If SIZE is that of a mode no bigger than a word, just use that
1856 mode's store operation. */
1857 if (size <= UNITS_PER_WORD
1858 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1860 emit_move_insn (change_address (x, mode, NULL),
1861 gen_rtx_REG (mode, regno));
1862 return;
1865 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1866 to the left before storing to memory. Note that the previous test
1867 doesn't handle all cases (e.g. SIZE == 3). */
1868 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1870 rtx tem = operand_subword (x, 0, 1, BLKmode);
1871 rtx shift;
1873 if (tem == 0)
1874 abort ();
1876 shift = expand_shift (LSHIFT_EXPR, word_mode,
1877 gen_rtx_REG (word_mode, regno),
1878 build_int_2 ((UNITS_PER_WORD - size)
1879 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1880 emit_move_insn (tem, shift);
1881 return;
1884 /* See if the machine can do this with a store multiple insn. */
1885 #ifdef HAVE_store_multiple
1886 if (HAVE_store_multiple)
1888 last = get_last_insn ();
1889 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1890 GEN_INT (nregs));
1891 if (pat)
1893 emit_insn (pat);
1894 return;
1896 else
1897 delete_insns_since (last);
1899 #endif
1901 for (i = 0; i < nregs; i++)
1903 rtx tem = operand_subword (x, i, 1, BLKmode);
1905 if (tem == 0)
1906 abort ();
1908 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1912 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1913 registers represented by a PARALLEL. SSIZE represents the total size of
1914 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1915 SRC in bits. */
1916 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1917 the balance will be in what would be the low-order memory addresses, i.e.
1918 left justified for big endian, right justified for little endian. This
1919 happens to be true for the targets currently using this support. If this
1920 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1921 would be needed. */
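/* Illustration only (hypothetical register numbers): a PARALLEL destination
   describing a 12-byte structure returned in two registers might look like

     (parallel [(expr_list (reg:DI 4) (const_int 0))
                (expr_list (reg:SI 5) (const_int 8))])

   i.e. each element pairs a register with its byte offset into the block,
   and a null register in the first element marks a parameter that is also
   passed on the stack.  emit_group_load copies the pieces of SRC into those
   registers; emit_group_store below does the reverse.  */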
1923 void
1924 emit_group_load (dst, orig_src, ssize, align)
1925 rtx dst, orig_src;
1926 unsigned int align;
1927 int ssize;
1929 rtx *tmps, src;
1930 int start, i;
1932 if (GET_CODE (dst) != PARALLEL)
1933 abort ();
1935 /* Check for a NULL entry, used to indicate that the parameter goes
1936 both on the stack and in registers. */
1937 if (XEXP (XVECEXP (dst, 0, 0), 0))
1938 start = 0;
1939 else
1940 start = 1;
1942 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1944 /* If we won't be loading directly from memory, protect the real source
1945 from strange tricks we might play. */
1946 src = orig_src;
1947 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
1949 if (GET_MODE (src) == VOIDmode)
1950 src = gen_reg_rtx (GET_MODE (dst));
1951 else
1952 src = gen_reg_rtx (GET_MODE (orig_src));
1953 emit_move_insn (src, orig_src);
1956 /* Process the pieces. */
1957 for (i = start; i < XVECLEN (dst, 0); i++)
1959 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1960 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1961 unsigned int bytelen = GET_MODE_SIZE (mode);
1962 int shift = 0;
1964 /* Handle trailing fragments that run over the size of the struct. */
1965 if (ssize >= 0 && bytepos + bytelen > ssize)
1967 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1968 bytelen = ssize - bytepos;
1969 if (bytelen <= 0)
1970 abort ();
1973 /* Optimize the access just a bit. */
1974 if (GET_CODE (src) == MEM
1975 && align >= GET_MODE_ALIGNMENT (mode)
1976 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1977 && bytelen == GET_MODE_SIZE (mode))
1979 tmps[i] = gen_reg_rtx (mode);
1980 emit_move_insn (tmps[i],
1981 change_address (src, mode,
1982 plus_constant (XEXP (src, 0),
1983 bytepos)));
1985 else if (GET_CODE (src) == CONCAT)
1987 if (bytepos == 0
1988 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1989 tmps[i] = XEXP (src, 0);
1990 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1991 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1992 tmps[i] = XEXP (src, 1);
1993 else
1994 abort ();
1996 else if ((CONSTANT_P (src)
1997 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
1998 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1999 tmps[i] = src;
2000 else
2001 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2002 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2003 mode, mode, align, ssize);
2005 if (BYTES_BIG_ENDIAN && shift)
2006 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2007 tmps[i], 0, OPTAB_WIDEN);
2010 emit_queue ();
2012 /* Copy the extracted pieces into the proper (probable) hard regs. */
2013 for (i = start; i < XVECLEN (dst, 0); i++)
2014 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2017 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2018 registers represented by a PARALLEL. SSIZE represents the total size of
2019 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
2021 void
2022 emit_group_store (orig_dst, src, ssize, align)
2023 rtx orig_dst, src;
2024 int ssize;
2025 unsigned int align;
2027 rtx *tmps, dst;
2028 int start, i;
2030 if (GET_CODE (src) != PARALLEL)
2031 abort ();
2033 /* Check for a NULL entry, used to indicate that the parameter goes
2034 both on the stack and in registers. */
2035 if (XEXP (XVECEXP (src, 0, 0), 0))
2036 start = 0;
2037 else
2038 start = 1;
2040 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2042 /* Copy the (probable) hard regs into pseudos. */
2043 for (i = start; i < XVECLEN (src, 0); i++)
2045 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2046 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2047 emit_move_insn (tmps[i], reg);
2049 emit_queue ();
2051 /* If we won't be storing directly into memory, protect the real destination
2052 from strange tricks we might play. */
2053 dst = orig_dst;
2054 if (GET_CODE (dst) == PARALLEL)
2056 rtx temp;
2058 /* We can get a PARALLEL dst if there is a conditional expression in
2059 a return statement. In that case, the dst and src are the same,
2060 so no action is necessary. */
2061 if (rtx_equal_p (dst, src))
2062 return;
2064 /* It is unclear if we can ever reach here, but we may as well handle
2065 it. Allocate a temporary, and split this into a store/load to/from
2066 the temporary. */
2068 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2069 emit_group_store (temp, src, ssize, align);
2070 emit_group_load (dst, temp, ssize, align);
2071 return;
2073 else if (GET_CODE (dst) != MEM)
2075 dst = gen_reg_rtx (GET_MODE (orig_dst));
2076 /* Make life a bit easier for combine. */
2077 emit_move_insn (dst, const0_rtx);
2080 /* Process the pieces. */
2081 for (i = start; i < XVECLEN (src, 0); i++)
2083 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2084 enum machine_mode mode = GET_MODE (tmps[i]);
2085 unsigned int bytelen = GET_MODE_SIZE (mode);
2087 /* Handle trailing fragments that run over the size of the struct. */
2088 if (ssize >= 0 && bytepos + bytelen > ssize)
2090 if (BYTES_BIG_ENDIAN)
2092 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2093 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2094 tmps[i], 0, OPTAB_WIDEN);
2096 bytelen = ssize - bytepos;
2099 /* Optimize the access just a bit. */
2100 if (GET_CODE (dst) == MEM
2101 && align >= GET_MODE_ALIGNMENT (mode)
2102 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2103 && bytelen == GET_MODE_SIZE (mode))
2104 emit_move_insn (change_address (dst, mode,
2105 plus_constant (XEXP (dst, 0),
2106 bytepos)),
2107 tmps[i]);
2108 else
2109 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2110 mode, tmps[i], align, ssize);
2113 emit_queue ();
2115 /* Copy from the pseudo into the (probable) hard reg. */
2116 if (GET_CODE (dst) == REG)
2117 emit_move_insn (orig_dst, dst);
2120 /* Generate code to copy a BLKmode object of TYPE out of a
2121 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2122 is null, a stack temporary is created. TGTBLK is returned.
2124 The primary purpose of this routine is to handle functions
2125 that return BLKmode structures in registers. Some machines
2126 (the PA for example) want to return all small structures
2127 in registers regardless of the structure's alignment. */
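/* Purely illustrative: a target that returns an 8-byte structure in a single
   DImode register would hand that REG in as SRCREG here, and the loop below
   spills it into TGTBLK one BITSIZE-bit field at a time.  */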
2130 copy_blkmode_from_reg (tgtblk, srcreg, type)
2131 rtx tgtblk;
2132 rtx srcreg;
2133 tree type;
2135 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2136 rtx src = NULL, dst = NULL;
2137 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2138 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2140 if (tgtblk == 0)
2142 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2143 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2144 preserve_temp_slots (tgtblk);
2147 /* This code assumes srcreg is at least a full word. If it isn't,
2148 copy it into a new pseudo which is a full word. */
2149 if (GET_MODE (srcreg) != BLKmode
2150 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2151 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2153 /* Structures whose size is not a multiple of a word are aligned
2154 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2155 machine, this means we must skip the empty high order bytes when
2156 calculating the bit offset. */
2157 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2158 big_endian_correction
2159 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
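/* Worked example (illustrative numbers only): with 32-bit words, a 6-byte
   structure gives bytes % UNITS_PER_WORD == 2, so the correction is
   32 - 2 * BITS_PER_UNIT == 16 bits of empty high-order padding to skip in
   the first word.  */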
2161 /* Copy the structure BITSIZE bits at a time.
2163 We could probably emit more efficient code for machines which do not use
2164 strict alignment, but it doesn't seem worth the effort at the current
2165 time. */
2166 for (bitpos = 0, xbitpos = big_endian_correction;
2167 bitpos < bytes * BITS_PER_UNIT;
2168 bitpos += bitsize, xbitpos += bitsize)
2170 /* We need a new source operand each time xbitpos is on a
2171 word boundary and when xbitpos == big_endian_correction
2172 (the first time through). */
2173 if (xbitpos % BITS_PER_WORD == 0
2174 || xbitpos == big_endian_correction)
2175 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2177 /* We need a new destination operand each time bitpos is on
2178 a word boundary. */
2179 if (bitpos % BITS_PER_WORD == 0)
2180 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2182 /* Use xbitpos for the source extraction (right justified) and
2183 bitpos for the destination store (left justified). */
2184 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2185 extract_bit_field (src, bitsize,
2186 xbitpos % BITS_PER_WORD, 1,
2187 NULL_RTX, word_mode, word_mode,
2188 bitsize, BITS_PER_WORD),
2189 bitsize, BITS_PER_WORD);
2192 return tgtblk;
2195 /* Add a USE expression for REG to the (possibly empty) list pointed
2196 to by CALL_FUSAGE. REG must denote a hard register. */
2198 void
2199 use_reg (call_fusage, reg)
2200 rtx *call_fusage, reg;
2202 if (GET_CODE (reg) != REG
2203 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2204 abort ();
2206 *call_fusage
2207 = gen_rtx_EXPR_LIST (VOIDmode,
2208 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2211 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2212 starting at REGNO. All of these registers must be hard registers. */
2214 void
2215 use_regs (call_fusage, regno, nregs)
2216 rtx *call_fusage;
2217 int regno;
2218 int nregs;
2220 int i;
2222 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2223 abort ();
2225 for (i = 0; i < nregs; i++)
2226 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2229 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2230 PARALLEL REGS. This is for calls that pass values in multiple
2231 non-contiguous locations. The Irix 6 ABI has examples of this. */
2233 void
2234 use_group_regs (call_fusage, regs)
2235 rtx *call_fusage;
2236 rtx regs;
2238 int i;
2240 for (i = 0; i < XVECLEN (regs, 0); i++)
2242 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2244 /* A NULL entry means the parameter goes both on the stack and in
2245 registers. This can also be a MEM for targets that pass values
2246 partially on the stack and partially in registers. */
2247 if (reg != 0 && GET_CODE (reg) == REG)
2248 use_reg (call_fusage, reg);
2252 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2253 rtx with BLKmode). The caller must pass TO through protect_from_queue
2254 before calling. ALIGN is maximum alignment we can assume. */
2256 static void
2257 clear_by_pieces (to, len, align)
2258 rtx to;
2259 unsigned HOST_WIDE_INT len;
2260 unsigned int align;
2262 struct clear_by_pieces data;
2263 rtx to_addr = XEXP (to, 0);
2264 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2265 enum machine_mode mode = VOIDmode, tmode;
2266 enum insn_code icode;
2268 data.offset = 0;
2269 data.to_addr = to_addr;
2270 data.to = to;
2271 data.autinc_to
2272 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2273 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2275 data.explicit_inc_to = 0;
2276 data.reverse
2277 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2278 if (data.reverse)
2279 data.offset = len;
2280 data.len = len;
2282 /* If copying requires more than two move insns,
2283 copy addresses to registers (to make displacements shorter)
2284 and use post-increment if available. */
2285 if (!data.autinc_to
2286 && move_by_pieces_ninsns (len, align) > 2)
2288 /* Determine the main mode we'll be using. */
2289 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2290 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2291 if (GET_MODE_SIZE (tmode) < max_size)
2292 mode = tmode;
2294 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2296 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2297 data.autinc_to = 1;
2298 data.explicit_inc_to = -1;
2301 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
2302 && ! data.autinc_to)
2304 data.to_addr = copy_addr_to_reg (to_addr);
2305 data.autinc_to = 1;
2306 data.explicit_inc_to = 1;
2309 if ( !data.autinc_to && CONSTANT_P (to_addr))
2310 data.to_addr = copy_addr_to_reg (to_addr);
2313 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2314 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2315 align = MOVE_MAX * BITS_PER_UNIT;
2317 /* First move what we can in the largest integer mode, then go to
2318 successively smaller modes. */
2320 while (max_size > 1)
2322 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2323 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2324 if (GET_MODE_SIZE (tmode) < max_size)
2325 mode = tmode;
2327 if (mode == VOIDmode)
2328 break;
2330 icode = mov_optab->handlers[(int) mode].insn_code;
2331 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2332 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2334 max_size = GET_MODE_SIZE (mode);
2337 /* The code above should have handled everything. */
2338 if (data.len != 0)
2339 abort ();
2342 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2343 with move instructions for mode MODE. GENFUN is the gen_... function
2344 to make a move insn for that mode. DATA has all the other info. */
2346 static void
2347 clear_by_pieces_1 (genfun, mode, data)
2348 rtx (*genfun) PARAMS ((rtx, ...));
2349 enum machine_mode mode;
2350 struct clear_by_pieces *data;
2352 unsigned int size = GET_MODE_SIZE (mode);
2353 rtx to1;
2355 while (data->len >= size)
2357 if (data->reverse)
2358 data->offset -= size;
2360 if (data->autinc_to)
2362 to1 = gen_rtx_MEM (mode, data->to_addr);
2363 MEM_COPY_ATTRIBUTES (to1, data->to);
2365 else
2366 to1 = change_address (data->to, mode,
2367 plus_constant (data->to_addr, data->offset));
2369 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2370 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2372 emit_insn ((*genfun) (to1, const0_rtx));
2374 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2375 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2377 if (! data->reverse)
2378 data->offset += size;
2380 data->len -= size;
2384 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2385 its length in bytes and ALIGN is the maximum alignment we can assume it has.
2387 If we call a function that returns the length of the block, return it. */
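/* A minimal sketch of a call (hypothetical operands, not from a caller in
   this file): clear_storage (blk_mem, GEN_INT (16), BITS_PER_WORD) zeroes a
   16-byte BLKmode MEM, assuming it is known to be word aligned.  */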
2390 clear_storage (object, size, align)
2391 rtx object;
2392 rtx size;
2393 unsigned int align;
2395 #ifdef TARGET_MEM_FUNCTIONS
2396 static tree fn;
2397 tree call_expr, arg_list;
2398 #endif
2399 rtx retval = 0;
2401 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2402 just move a zero. Otherwise, do this a piece at a time. */
2403 if (GET_MODE (object) != BLKmode
2404 && GET_CODE (size) == CONST_INT
2405 && GET_MODE_SIZE (GET_MODE (object)) == INTVAL (size))
2406 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2407 else
2409 object = protect_from_queue (object, 1);
2410 size = protect_from_queue (size, 0);
2412 if (GET_CODE (size) == CONST_INT
2413 && MOVE_BY_PIECES_P (INTVAL (size), align))
2414 clear_by_pieces (object, INTVAL (size), align);
2415 else
2417 /* Try the most limited insn first, because there's no point
2418 including more than one in the machine description unless
2419 the more limited one has some advantage. */
2421 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2422 enum machine_mode mode;
2424 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2425 mode = GET_MODE_WIDER_MODE (mode))
2427 enum insn_code code = clrstr_optab[(int) mode];
2428 insn_operand_predicate_fn pred;
2430 if (code != CODE_FOR_nothing
2431 /* We don't need MODE to be narrower than
2432 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2433 the mode mask, as it is returned by the macro, it will
2434 definitely be less than the actual mode mask. */
2435 && ((GET_CODE (size) == CONST_INT
2436 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2437 <= (GET_MODE_MASK (mode) >> 1)))
2438 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2439 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2440 || (*pred) (object, BLKmode))
2441 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2442 || (*pred) (opalign, VOIDmode)))
2444 rtx op1;
2445 rtx last = get_last_insn ();
2446 rtx pat;
2448 op1 = convert_to_mode (mode, size, 1);
2449 pred = insn_data[(int) code].operand[1].predicate;
2450 if (pred != 0 && ! (*pred) (op1, mode))
2451 op1 = copy_to_mode_reg (mode, op1);
2453 pat = GEN_FCN ((int) code) (object, op1, opalign);
2454 if (pat)
2456 emit_insn (pat);
2457 return 0;
2459 else
2460 delete_insns_since (last);
2464 /* OBJECT or SIZE may have been passed through protect_from_queue.
2466 It is unsafe to save the value generated by protect_from_queue
2467 and reuse it later. Consider what happens if emit_queue is
2468 called before the return value from protect_from_queue is used.
2470 Expansion of the CALL_EXPR below will call emit_queue before
2471 we are finished emitting RTL for argument setup. So if we are
2472 not careful we could get the wrong value for an argument.
2474 To avoid this problem we go ahead and emit code to copy OBJECT
2475 and SIZE into new pseudos. We can then place those new pseudos
2476 into an RTL_EXPR and use them later, even after a call to
2477 emit_queue.
2479 Note this is not strictly needed for library calls since they
2480 do not call emit_queue before loading their arguments. However,
2481 we may need to have library calls call emit_queue in the future
2482 since failing to do so could cause problems for targets which
2483 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2484 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2486 #ifdef TARGET_MEM_FUNCTIONS
2487 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2488 #else
2489 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2490 TREE_UNSIGNED (integer_type_node));
2491 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2492 #endif
2494 #ifdef TARGET_MEM_FUNCTIONS
2495 /* It is incorrect to use the libcall calling conventions to call
2496 memset in this context.
2498 This could be a user call to memset and the user may wish to
2499 examine the return value from memset.
2501 For targets where libcalls and normal calls have different
2502 conventions for returning pointers, we could end up generating
2503 incorrect code.
2505 So instead of using a libcall sequence we build up a suitable
2506 CALL_EXPR and expand the call in the normal fashion. */
2507 if (fn == NULL_TREE)
2509 tree fntype;
2511 /* This was copied from except.c, I don't know if all this is
2512 necessary in this context or not. */
2513 fn = get_identifier ("memset");
2514 fntype = build_pointer_type (void_type_node);
2515 fntype = build_function_type (fntype, NULL_TREE);
2516 fn = build_decl (FUNCTION_DECL, fn, fntype);
2517 ggc_add_tree_root (&fn, 1);
2518 DECL_EXTERNAL (fn) = 1;
2519 TREE_PUBLIC (fn) = 1;
2520 DECL_ARTIFICIAL (fn) = 1;
2521 make_decl_rtl (fn, NULL_PTR, 1);
2522 assemble_external (fn);
2525 /* We need to make an argument list for the function call.
2527 memset has three arguments, the first is a void * address, the
2528 second an integer with the initialization value, the last is a
2529 size_t byte count for the copy. */
2530 arg_list
2531 = build_tree_list (NULL_TREE,
2532 make_tree (build_pointer_type (void_type_node),
2533 object));
2534 TREE_CHAIN (arg_list)
2535 = build_tree_list (NULL_TREE,
2536 make_tree (integer_type_node, const0_rtx));
2537 TREE_CHAIN (TREE_CHAIN (arg_list))
2538 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2539 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
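/* At this point ARG_LIST corresponds to the argument list of the C-level
   call memset (object, 0, size); the CALL_EXPR assembled below simply wraps
   FN around it.  */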
2541 /* Now we have to build up the CALL_EXPR itself. */
2542 call_expr = build1 (ADDR_EXPR,
2543 build_pointer_type (TREE_TYPE (fn)), fn);
2544 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2545 call_expr, arg_list, NULL_TREE);
2546 TREE_SIDE_EFFECTS (call_expr) = 1;
2548 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2549 #else
2550 emit_library_call (bzero_libfunc, LCT_NORMAL,
2551 VOIDmode, 2, object, Pmode, size,
2552 TYPE_MODE (integer_type_node));
2553 #endif
2557 return retval;
2560 /* Generate code to copy Y into X.
2561 Both Y and X must have the same mode, except that
2562 Y can be a constant with VOIDmode.
2563 This mode cannot be BLKmode; use emit_block_move for that.
2565 Return the last instruction emitted. */
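/* A minimal sketch of a call (hypothetical pseudo, not from this file):

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, const0_rtx);

   Here Y is a VOIDmode constant, which is the one mode mismatch permitted
   by the rules above.  */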
2568 emit_move_insn (x, y)
2569 rtx x, y;
2571 enum machine_mode mode = GET_MODE (x);
2573 x = protect_from_queue (x, 1);
2574 y = protect_from_queue (y, 0);
2576 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2577 abort ();
2579 /* Never force constant_p_rtx to memory. */
2580 if (GET_CODE (y) == CONSTANT_P_RTX)
2582 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2583 y = force_const_mem (mode, y);
2585 /* If X or Y are memory references, verify that their addresses are valid
2586 for the machine. */
2587 if (GET_CODE (x) == MEM
2588 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2589 && ! push_operand (x, GET_MODE (x)))
2590 || (flag_force_addr
2591 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2592 x = change_address (x, VOIDmode, XEXP (x, 0));
2594 if (GET_CODE (y) == MEM
2595 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2596 || (flag_force_addr
2597 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2598 y = change_address (y, VOIDmode, XEXP (y, 0));
2600 if (mode == BLKmode)
2601 abort ();
2603 return emit_move_insn_1 (x, y);
2606 /* Low level part of emit_move_insn.
2607 Called just like emit_move_insn, but assumes X and Y
2608 are basically valid. */
2611 emit_move_insn_1 (x, y)
2612 rtx x, y;
2614 enum machine_mode mode = GET_MODE (x);
2615 enum machine_mode submode;
2616 enum mode_class class = GET_MODE_CLASS (mode);
2617 unsigned int i;
2619 if (mode >= MAX_MACHINE_MODE)
2620 abort ();
2622 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2623 return
2624 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2626 /* Expand complex moves by moving real part and imag part, if possible. */
2627 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2628 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2629 * BITS_PER_UNIT),
2630 (class == MODE_COMPLEX_INT
2631 ? MODE_INT : MODE_FLOAT),
2633 && (mov_optab->handlers[(int) submode].insn_code
2634 != CODE_FOR_nothing))
2636 /* Don't split destination if it is a stack push. */
2637 int stack = push_operand (x, GET_MODE (x));
2639 /* If this is a stack, push the highpart first, so it
2640 will be in the argument order.
2642 In that case, change_address is used only to convert
2643 the mode, not to change the address. */
2644 if (stack)
2646 /* Note that the real part always precedes the imag part in memory
2647 regardless of machine's endianness. */
2648 #ifdef STACK_GROWS_DOWNWARD
2649 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2650 (gen_rtx_MEM (submode, XEXP (x, 0)),
2651 gen_imagpart (submode, y)));
2652 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2653 (gen_rtx_MEM (submode, XEXP (x, 0)),
2654 gen_realpart (submode, y)));
2655 #else
2656 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2657 (gen_rtx_MEM (submode, XEXP (x, 0)),
2658 gen_realpart (submode, y)));
2659 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2660 (gen_rtx_MEM (submode, XEXP (x, 0)),
2661 gen_imagpart (submode, y)));
2662 #endif
2664 else
2666 rtx realpart_x, realpart_y;
2667 rtx imagpart_x, imagpart_y;
2669 /* If this is a complex value with each part being smaller than a
2670 word, the usual calling sequence will likely pack the pieces into
2671 a single register. Unfortunately, SUBREG of hard registers only
2672 deals in terms of words, so we have a problem converting input
2673 arguments to the CONCAT of two registers that is used elsewhere
2674 for complex values. If this is before reload, we can copy it into
2675 memory and reload. FIXME, we should see about using extract and
2676 insert on integer registers, but complex short and complex char
2677 variables should be rarely used. */
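/* Illustrative case (assuming 32-bit words): a `__complex__ short' occupies
   32 bits in total, so the usual calling sequence may pack both 16-bit parts
   into one hard register; the code below bounces such a value through a
   stack temporary rather than taking word-sized SUBREGs of it.  */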
2678 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2679 && (reload_in_progress | reload_completed) == 0)
2681 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2682 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2684 if (packed_dest_p || packed_src_p)
2686 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2687 ? MODE_FLOAT : MODE_INT);
2689 enum machine_mode reg_mode =
2690 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2692 if (reg_mode != BLKmode)
2694 rtx mem = assign_stack_temp (reg_mode,
2695 GET_MODE_SIZE (mode), 0);
2697 rtx cmem = change_address (mem, mode, NULL_RTX);
2699 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2701 if (packed_dest_p)
2703 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2704 emit_move_insn_1 (cmem, y);
2705 return emit_move_insn_1 (sreg, mem);
2707 else
2709 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2710 emit_move_insn_1 (mem, sreg);
2711 return emit_move_insn_1 (x, cmem);
2717 realpart_x = gen_realpart (submode, x);
2718 realpart_y = gen_realpart (submode, y);
2719 imagpart_x = gen_imagpart (submode, x);
2720 imagpart_y = gen_imagpart (submode, y);
2722 /* Show the output dies here. This is necessary for SUBREGs
2723 of pseudos since we cannot track their lifetimes correctly;
2724 hard regs shouldn't appear here except as return values.
2725 We never want to emit such a clobber after reload. */
2726 if (x != y
2727 && ! (reload_in_progress || reload_completed)
2728 && (GET_CODE (realpart_x) == SUBREG
2729 || GET_CODE (imagpart_x) == SUBREG))
2731 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2734 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2735 (realpart_x, realpart_y));
2736 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2737 (imagpart_x, imagpart_y));
2740 return get_last_insn ();
2743 /* This will handle any multi-word mode that lacks a move_insn pattern.
2744 However, you will get better code if you define such patterns,
2745 even if they must turn into multiple assembler instructions. */
2746 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2748 rtx last_insn = 0;
2749 rtx seq, inner;
2750 int need_clobber;
2752 #ifdef PUSH_ROUNDING
2754 /* If X is a push on the stack, do the push now and replace
2755 X with a reference to the stack pointer. */
2756 if (push_operand (x, GET_MODE (x)))
2758 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2759 x = change_address (x, VOIDmode, stack_pointer_rtx);
2761 #endif
2763 /* If we are in reload, see if either operand is a MEM whose address
2764 is scheduled for replacement. */
2765 if (reload_in_progress && GET_CODE (x) == MEM
2766 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2768 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2770 MEM_COPY_ATTRIBUTES (new, x);
2771 x = new;
2773 if (reload_in_progress && GET_CODE (y) == MEM
2774 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2776 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2778 MEM_COPY_ATTRIBUTES (new, y);
2779 y = new;
2782 start_sequence ();
2784 need_clobber = 0;
2785 for (i = 0;
2786 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2787 i++)
2789 rtx xpart = operand_subword (x, i, 1, mode);
2790 rtx ypart = operand_subword (y, i, 1, mode);
2792 /* If we can't get a part of Y, put Y into memory if it is a
2793 constant. Otherwise, force it into a register. If we still
2794 can't get a part of Y, abort. */
2795 if (ypart == 0 && CONSTANT_P (y))
2797 y = force_const_mem (mode, y);
2798 ypart = operand_subword (y, i, 1, mode);
2800 else if (ypart == 0)
2801 ypart = operand_subword_force (y, i, mode);
2803 if (xpart == 0 || ypart == 0)
2804 abort ();
2806 need_clobber |= (GET_CODE (xpart) == SUBREG);
2808 last_insn = emit_move_insn (xpart, ypart);
2811 seq = gen_sequence ();
2812 end_sequence ();
2814 /* Show the output dies here. This is necessary for SUBREGs
2815 of pseudos since we cannot track their lifetimes correctly;
2816 hard regs shouldn't appear here except as return values.
2817 We never want to emit such a clobber after reload. */
2818 if (x != y
2819 && ! (reload_in_progress || reload_completed)
2820 && need_clobber != 0)
2822 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2825 emit_insn (seq);
2827 return last_insn;
2829 else
2830 abort ();
2833 /* Pushing data onto the stack. */
2835 /* Push a block of length SIZE (perhaps variable)
2836 and return an rtx to address the beginning of the block.
2837 Note that it is not possible for the value returned to be a QUEUED.
2838 The value may be virtual_outgoing_args_rtx.
2840 EXTRA is the number of bytes of padding to push in addition to SIZE.
2841 BELOW nonzero means this padding comes at low addresses;
2842 otherwise, the padding comes at high addresses. */
2845 push_block (size, extra, below)
2846 rtx size;
2847 int extra, below;
2849 register rtx temp;
2851 size = convert_modes (Pmode, ptr_mode, size, 1);
2852 if (CONSTANT_P (size))
2853 anti_adjust_stack (plus_constant (size, extra));
2854 else if (GET_CODE (size) == REG && extra == 0)
2855 anti_adjust_stack (size);
2856 else
2858 temp = copy_to_mode_reg (Pmode, size);
2859 if (extra != 0)
2860 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2861 temp, 0, OPTAB_LIB_WIDEN);
2862 anti_adjust_stack (temp);
2865 #ifndef STACK_GROWS_DOWNWARD
2866 #ifdef ARGS_GROW_DOWNWARD
2867 if (!ACCUMULATE_OUTGOING_ARGS)
2868 #else
2869 if (0)
2870 #endif
2871 #else
2872 if (1)
2873 #endif
2875 /* Return the lowest stack address when STACK or ARGS grow downward and
2876 we are not accumulating outgoing arguments (the c4x port uses such
2877 conventions). */
2878 temp = virtual_outgoing_args_rtx;
2879 if (extra != 0 && below)
2880 temp = plus_constant (temp, extra);
2882 else
2884 if (GET_CODE (size) == CONST_INT)
2885 temp = plus_constant (virtual_outgoing_args_rtx,
2886 -INTVAL (size) - (below ? 0 : extra));
2887 else if (extra != 0 && !below)
2888 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2889 negate_rtx (Pmode, plus_constant (size, extra)));
2890 else
2891 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2892 negate_rtx (Pmode, size));
2895 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2899 gen_push_operand ()
2901 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2904 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2905 block of SIZE bytes. */
2907 static rtx
2908 get_push_address (size)
2909 int size;
2911 register rtx temp;
2913 if (STACK_PUSH_CODE == POST_DEC)
2914 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2915 else if (STACK_PUSH_CODE == POST_INC)
2916 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2917 else
2918 temp = stack_pointer_rtx;
2920 return copy_to_reg (temp);
2923 /* Generate code to push X onto the stack, assuming it has mode MODE and
2924 type TYPE.
2925 MODE is redundant except when X is a CONST_INT (since they don't
2926 carry mode info).
2927 SIZE is an rtx for the size of data to be copied (in bytes),
2928 needed only if X is BLKmode.
2930 ALIGN is maximum alignment we can assume.
2932 If PARTIAL and REG are both nonzero, then copy that many of the first
2933 words of X into registers starting with REG, and push the rest of X.
2934 The amount of space pushed is decreased by PARTIAL words,
2935 rounded *down* to a multiple of PARM_BOUNDARY.
2936 REG must be a hard register in this case.
2937 If REG is zero but PARTIAL is not, take all other actions for an
2938 argument partially in registers, but do not actually load any
2939 registers.
2941 EXTRA is the amount in bytes of extra space to leave next to this arg.
2942 This is ignored if an argument block has already been allocated.
2944 On a machine that lacks real push insns, ARGS_ADDR is the address of
2945 the bottom of the argument block for this call. We use indexing off there
2946 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2947 argument block has not been preallocated.
2949 ARGS_SO_FAR is the size of args previously pushed for this call.
2951 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2952 for arguments passed in registers. If nonzero, it will be the number
2953 of bytes required. */
2955 void
2956 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2957 args_addr, args_so_far, reg_parm_stack_space,
2958 alignment_pad)
2959 register rtx x;
2960 enum machine_mode mode;
2961 tree type;
2962 rtx size;
2963 unsigned int align;
2964 int partial;
2965 rtx reg;
2966 int extra;
2967 rtx args_addr;
2968 rtx args_so_far;
2969 int reg_parm_stack_space;
2970 rtx alignment_pad;
2972 rtx xinner;
2973 enum direction stack_direction
2974 #ifdef STACK_GROWS_DOWNWARD
2975 = downward;
2976 #else
2977 = upward;
2978 #endif
2980 /* Decide where to pad the argument: `downward' for below,
2981 `upward' for above, or `none' for don't pad it.
2982 Default is below for small data on big-endian machines; else above. */
2983 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2985 /* Invert direction if stack is post-update. */
2986 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2987 if (where_pad != none)
2988 where_pad = (where_pad == downward ? upward : downward);
2990 xinner = x = protect_from_queue (x, 0);
2992 if (mode == BLKmode)
2994 /* Copy a block into the stack, entirely or partially. */
2996 register rtx temp;
2997 int used = partial * UNITS_PER_WORD;
2998 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2999 int skip;
3001 if (size == 0)
3002 abort ();
3004 used -= offset;
3006 /* USED is now the # of bytes we need not copy to the stack
3007 because registers will take care of them. */
3009 if (partial != 0)
3010 xinner = change_address (xinner, BLKmode,
3011 plus_constant (XEXP (xinner, 0), used));
3013 /* If the partial register-part of the arg counts in its stack size,
3014 skip the part of stack space corresponding to the registers.
3015 Otherwise, start copying to the beginning of the stack space,
3016 by setting SKIP to 0. */
3017 skip = (reg_parm_stack_space == 0) ? 0 : used;
3019 #ifdef PUSH_ROUNDING
3020 /* Do it with several push insns if that doesn't take lots of insns
3021 and if there is no difficulty with push insns that skip bytes
3022 on the stack for alignment purposes. */
3023 if (args_addr == 0
3024 && PUSH_ARGS
3025 && GET_CODE (size) == CONST_INT
3026 && skip == 0
3027 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3028 /* Here we avoid the case of a structure whose weak alignment
3029 forces many pushes of a small amount of data,
3030 and such small pushes do rounding that causes trouble. */
3031 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3032 || align >= BIGGEST_ALIGNMENT
3033 || PUSH_ROUNDING (align) == align)
3034 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3036 /* Push padding now if padding above and stack grows down,
3037 or if padding below and stack grows up.
3038 But if space already allocated, this has already been done. */
3039 if (extra && args_addr == 0
3040 && where_pad != none && where_pad != stack_direction)
3041 anti_adjust_stack (GEN_INT (extra));
3043 stack_pointer_delta += INTVAL (size) - used;
3044 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
3045 INTVAL (size) - used, align);
3047 if (current_function_check_memory_usage && ! in_check_memory_usage)
3049 rtx temp;
3051 in_check_memory_usage = 1;
3052 temp = get_push_address (INTVAL (size) - used);
3053 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3054 emit_library_call (chkr_copy_bitmap_libfunc,
3055 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3056 Pmode, XEXP (xinner, 0), Pmode,
3057 GEN_INT (INTVAL (size) - used),
3058 TYPE_MODE (sizetype));
3059 else
3060 emit_library_call (chkr_set_right_libfunc,
3061 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3062 Pmode, GEN_INT (INTVAL (size) - used),
3063 TYPE_MODE (sizetype),
3064 GEN_INT (MEMORY_USE_RW),
3065 TYPE_MODE (integer_type_node));
3066 in_check_memory_usage = 0;
3069 else
3070 #endif /* PUSH_ROUNDING */
3072 rtx target;
3074 /* Otherwise make space on the stack and copy the data
3075 to the address of that space. */
3077 /* Deduct words put into registers from the size we must copy. */
3078 if (partial != 0)
3080 if (GET_CODE (size) == CONST_INT)
3081 size = GEN_INT (INTVAL (size) - used);
3082 else
3083 size = expand_binop (GET_MODE (size), sub_optab, size,
3084 GEN_INT (used), NULL_RTX, 0,
3085 OPTAB_LIB_WIDEN);
3088 /* Get the address of the stack space.
3089 In this case, we do not deal with EXTRA separately.
3090 A single stack adjust will do. */
3091 if (! args_addr)
3093 temp = push_block (size, extra, where_pad == downward);
3094 extra = 0;
3096 else if (GET_CODE (args_so_far) == CONST_INT)
3097 temp = memory_address (BLKmode,
3098 plus_constant (args_addr,
3099 skip + INTVAL (args_so_far)));
3100 else
3101 temp = memory_address (BLKmode,
3102 plus_constant (gen_rtx_PLUS (Pmode,
3103 args_addr,
3104 args_so_far),
3105 skip));
3106 if (current_function_check_memory_usage && ! in_check_memory_usage)
3108 in_check_memory_usage = 1;
3109 target = copy_to_reg (temp);
3110 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3111 emit_library_call (chkr_copy_bitmap_libfunc,
3112 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3113 target, Pmode,
3114 XEXP (xinner, 0), Pmode,
3115 size, TYPE_MODE (sizetype));
3116 else
3117 emit_library_call (chkr_set_right_libfunc,
3118 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3119 target, Pmode,
3120 size, TYPE_MODE (sizetype),
3121 GEN_INT (MEMORY_USE_RW),
3122 TYPE_MODE (integer_type_node));
3123 in_check_memory_usage = 0;
3126 target = gen_rtx_MEM (BLKmode, temp);
3128 if (type != 0)
3130 set_mem_attributes (target, type, 1);
3131 /* Function incoming arguments may overlap with sibling call
3132 outgoing arguments and we cannot allow reordering of reads
3133 from function arguments with stores to outgoing arguments
3134 of sibling calls. */
3135 MEM_ALIAS_SET (target) = 0;
3138 /* TEMP is the address of the block. Copy the data there. */
3139 if (GET_CODE (size) == CONST_INT
3140 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3142 move_by_pieces (target, xinner, INTVAL (size), align);
3143 goto ret;
3145 else
3147 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3148 enum machine_mode mode;
3150 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3151 mode != VOIDmode;
3152 mode = GET_MODE_WIDER_MODE (mode))
3154 enum insn_code code = movstr_optab[(int) mode];
3155 insn_operand_predicate_fn pred;
3157 if (code != CODE_FOR_nothing
3158 && ((GET_CODE (size) == CONST_INT
3159 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3160 <= (GET_MODE_MASK (mode) >> 1)))
3161 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3162 && (!(pred = insn_data[(int) code].operand[0].predicate)
3163 || ((*pred) (target, BLKmode)))
3164 && (!(pred = insn_data[(int) code].operand[1].predicate)
3165 || ((*pred) (xinner, BLKmode)))
3166 && (!(pred = insn_data[(int) code].operand[3].predicate)
3167 || ((*pred) (opalign, VOIDmode))))
3169 rtx op2 = convert_to_mode (mode, size, 1);
3170 rtx last = get_last_insn ();
3171 rtx pat;
3173 pred = insn_data[(int) code].operand[2].predicate;
3174 if (pred != 0 && ! (*pred) (op2, mode))
3175 op2 = copy_to_mode_reg (mode, op2);
3177 pat = GEN_FCN ((int) code) (target, xinner,
3178 op2, opalign);
3179 if (pat)
3181 emit_insn (pat);
3182 goto ret;
3184 else
3185 delete_insns_since (last);
3190 if (!ACCUMULATE_OUTGOING_ARGS)
3192 /* If the source is referenced relative to the stack pointer,
3193 copy it to another register to stabilize it. We do not need
3194 to do this if we know that we won't be changing sp. */
3196 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3197 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3198 temp = copy_to_reg (temp);
3201 /* Make inhibit_defer_pop nonzero around the library call
3202 to force it to pop the bcopy-arguments right away. */
3203 NO_DEFER_POP;
3204 #ifdef TARGET_MEM_FUNCTIONS
3205 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3206 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3207 convert_to_mode (TYPE_MODE (sizetype),
3208 size, TREE_UNSIGNED (sizetype)),
3209 TYPE_MODE (sizetype));
3210 #else
3211 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3212 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3213 convert_to_mode (TYPE_MODE (integer_type_node),
3214 size,
3215 TREE_UNSIGNED (integer_type_node)),
3216 TYPE_MODE (integer_type_node));
3217 #endif
3218 OK_DEFER_POP;
3221 else if (partial > 0)
3223 /* Scalar partly in registers. */
3225 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3226 int i;
3227 int not_stack;
3228 /* # words of start of argument
3229 that we must make space for but need not store. */
3230 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3231 int args_offset = INTVAL (args_so_far);
3232 int skip;
3234 /* Push padding now if padding above and stack grows down,
3235 or if padding below and stack grows up.
3236 But if space already allocated, this has already been done. */
3237 if (extra && args_addr == 0
3238 && where_pad != none && where_pad != stack_direction)
3239 anti_adjust_stack (GEN_INT (extra));
3241 /* If we make space by pushing it, we might as well push
3242 the real data. Otherwise, we can leave OFFSET nonzero
3243 and leave the space uninitialized. */
3244 if (args_addr == 0)
3245 offset = 0;
3247 /* Now NOT_STACK gets the number of words that we don't need to
3248 allocate on the stack. */
3249 not_stack = partial - offset;
3251 /* If the partial register-part of the arg counts in its stack size,
3252 skip the part of stack space corresponding to the registers.
3253 Otherwise, start copying to the beginning of the stack space,
3254 by setting SKIP to 0. */
3255 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3257 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3258 x = validize_mem (force_const_mem (mode, x));
3260 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3261 SUBREGs of such registers are not allowed. */
3262 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3263 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3264 x = copy_to_reg (x);
3266 /* Loop over all the words allocated on the stack for this arg. */
3267 /* We can do it by words, because any scalar bigger than a word
3268 has a size that is a multiple of a word. */
3269 #ifndef PUSH_ARGS_REVERSED
3270 for (i = not_stack; i < size; i++)
3271 #else
3272 for (i = size - 1; i >= not_stack; i--)
3273 #endif
3274 if (i >= not_stack + offset)
3275 emit_push_insn (operand_subword_force (x, i, mode),
3276 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3277 0, args_addr,
3278 GEN_INT (args_offset + ((i - not_stack + skip)
3279 * UNITS_PER_WORD)),
3280 reg_parm_stack_space, alignment_pad);
3282 else
3284 rtx addr;
3285 rtx target = NULL_RTX;
3286 rtx dest;
3288 /* Push padding now if padding above and stack grows down,
3289 or if padding below and stack grows up.
3290 But if space already allocated, this has already been done. */
3291 if (extra && args_addr == 0
3292 && where_pad != none && where_pad != stack_direction)
3293 anti_adjust_stack (GEN_INT (extra));
3295 #ifdef PUSH_ROUNDING
3296 if (args_addr == 0 && PUSH_ARGS)
3298 addr = gen_push_operand ();
3299 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3301 else
3302 #endif
3304 if (GET_CODE (args_so_far) == CONST_INT)
3305 addr
3306 = memory_address (mode,
3307 plus_constant (args_addr,
3308 INTVAL (args_so_far)));
3309 else
3310 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3311 args_so_far));
3312 target = addr;
3315 dest = gen_rtx_MEM (mode, addr);
3316 if (type != 0)
3318 set_mem_attributes (dest, type, 1);
3319 /* Function incoming arguments may overlap with sibling call
3320 outgoing arguments and we cannot allow reordering of reads
3321 from function arguments with stores to outgoing arguments
3322 of sibling calls. */
3323 MEM_ALIAS_SET (dest) = 0;
3326 emit_move_insn (dest, x);
3328 if (current_function_check_memory_usage && ! in_check_memory_usage)
3330 in_check_memory_usage = 1;
3331 if (target == 0)
3332 target = get_push_address (GET_MODE_SIZE (mode));
3334 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3335 emit_library_call (chkr_copy_bitmap_libfunc,
3336 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3337 Pmode, XEXP (x, 0), Pmode,
3338 GEN_INT (GET_MODE_SIZE (mode)),
3339 TYPE_MODE (sizetype));
3340 else
3341 emit_library_call (chkr_set_right_libfunc,
3342 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3343 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3344 TYPE_MODE (sizetype),
3345 GEN_INT (MEMORY_USE_RW),
3346 TYPE_MODE (integer_type_node));
3347 in_check_memory_usage = 0;
3351 ret:
3352 /* If part should go in registers, copy that part
3353 into the appropriate registers. Do this now, at the end,
3354 since mem-to-mem copies above may do function calls. */
3355 if (partial > 0 && reg != 0)
3357 /* Handle calls that pass values in multiple non-contiguous locations.
3358 The Irix 6 ABI has examples of this. */
3359 if (GET_CODE (reg) == PARALLEL)
3360 emit_group_load (reg, x, -1, align); /* ??? size? */
3361 else
3362 move_block_to_reg (REGNO (reg), x, partial, mode);
3365 if (extra && args_addr == 0 && where_pad == stack_direction)
3366 anti_adjust_stack (GEN_INT (extra));
3368 if (alignment_pad && args_addr == 0)
3369 anti_adjust_stack (alignment_pad);
3372 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3373 operations. */
3375 static rtx
3376 get_subtarget (x)
3377 rtx x;
3379 return ((x == 0
3380 /* Only registers can be subtargets. */
3381 || GET_CODE (x) != REG
3382 /* If the register is readonly, it can't be set more than once. */
3383 || RTX_UNCHANGING_P (x)
3384 /* Don't use hard regs to avoid extending their life. */
3385 || REGNO (x) < FIRST_PSEUDO_REGISTER
3386 /* Avoid subtargets inside loops,
3387 since they hide some invariant expressions. */
3388 || preserve_subexpressions_p ())
3389 ? 0 : x);
3392 /* Expand an assignment that stores the value of FROM into TO.
3393 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3394 (This may contain a QUEUED rtx;
3395 if the value is constant, this rtx is a constant.)
3396 Otherwise, the returned value is NULL_RTX.
3398 SUGGEST_REG is no longer actually used.
3399 It used to mean, copy the value through a register
3400 and return that register, if that is possible.
3401 We now use WANT_VALUE to decide whether to do this. */
3404 expand_assignment (to, from, want_value, suggest_reg)
3405 tree to, from;
3406 int want_value;
3407 int suggest_reg ATTRIBUTE_UNUSED;
3409 register rtx to_rtx = 0;
3410 rtx result;
3412 /* Don't crash if the lhs of the assignment was erroneous. */
3414 if (TREE_CODE (to) == ERROR_MARK)
3416 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3417 return want_value ? result : NULL_RTX;
3420 /* Assignment of a structure component needs special treatment
3421 if the structure component's rtx is not simply a MEM.
3422 Assignment of an array element at a constant index, and assignment of
3423 an array element in an unaligned packed structure field, has the same
3424 problem. */
3426 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3427 || TREE_CODE (to) == ARRAY_REF)
3429 enum machine_mode mode1;
3430 HOST_WIDE_INT bitsize, bitpos;
3431 tree offset;
3432 int unsignedp;
3433 int volatilep = 0;
3434 tree tem;
3435 unsigned int alignment;
3437 push_temp_slots ();
3438 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3439 &unsignedp, &volatilep, &alignment);
3441 /* If we are going to use store_bit_field and extract_bit_field,
3442 make sure to_rtx will be safe for multiple use. */
3444 if (mode1 == VOIDmode && want_value)
3445 tem = stabilize_reference (tem);
3447 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3448 if (offset != 0)
3450 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3452 if (GET_CODE (to_rtx) != MEM)
3453 abort ();
3455 if (GET_MODE (offset_rtx) != ptr_mode)
3457 #ifdef POINTERS_EXTEND_UNSIGNED
3458 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3459 #else
3460 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3461 #endif
3464 /* A constant address in TO_RTX can have VOIDmode, we must not try
3465 to call force_reg for that case. Avoid that case. */
3466 if (GET_CODE (to_rtx) == MEM
3467 && GET_MODE (to_rtx) == BLKmode
3468 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3469 && bitsize
3470 && (bitpos % bitsize) == 0
3471 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3472 && alignment == GET_MODE_ALIGNMENT (mode1))
3474 rtx temp = change_address (to_rtx, mode1,
3475 plus_constant (XEXP (to_rtx, 0),
3476 (bitpos /
3477 BITS_PER_UNIT)));
3478 if (GET_CODE (XEXP (temp, 0)) == REG)
3479 to_rtx = temp;
3480 else
3481 to_rtx = change_address (to_rtx, mode1,
3482 force_reg (GET_MODE (XEXP (temp, 0)),
3483 XEXP (temp, 0)));
3484 bitpos = 0;
3487 to_rtx = change_address (to_rtx, VOIDmode,
3488 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3489 force_reg (ptr_mode,
3490 offset_rtx)));
3493 if (volatilep)
3495 if (GET_CODE (to_rtx) == MEM)
3497 /* When the offset is zero, to_rtx is the address of the
3498 structure we are storing into, and hence may be shared.
3499 We must make a new MEM before setting the volatile bit. */
3500 if (offset == 0)
3501 to_rtx = copy_rtx (to_rtx);
3503 MEM_VOLATILE_P (to_rtx) = 1;
3505 #if 0 /* This was turned off because, when a field is volatile
3506 in an object which is not volatile, the object may be in a register,
3507 and then we would abort over here. */
3508 else
3509 abort ();
3510 #endif
3513 if (TREE_CODE (to) == COMPONENT_REF
3514 && TREE_READONLY (TREE_OPERAND (to, 1)))
3516 if (offset == 0)
3517 to_rtx = copy_rtx (to_rtx);
3519 RTX_UNCHANGING_P (to_rtx) = 1;
3522 /* Check the access. */
3523 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3525 rtx to_addr;
3526 int size;
3527 int best_mode_size;
3528 enum machine_mode best_mode;
3530 best_mode = get_best_mode (bitsize, bitpos,
3531 TYPE_ALIGN (TREE_TYPE (tem)),
3532 mode1, volatilep);
3533 if (best_mode == VOIDmode)
3534 best_mode = QImode;
3536 best_mode_size = GET_MODE_BITSIZE (best_mode);
3537 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3538 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3539 size *= GET_MODE_SIZE (best_mode);
3541 /* Check the access right of the pointer. */
3542 in_check_memory_usage = 1;
3543 if (size)
3544 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3545 VOIDmode, 3, to_addr, Pmode,
3546 GEN_INT (size), TYPE_MODE (sizetype),
3547 GEN_INT (MEMORY_USE_WO),
3548 TYPE_MODE (integer_type_node));
3549 in_check_memory_usage = 0;
3552 /* If this is a varying-length object, we must get the address of
3553 the source and do an explicit block move. */
3554 if (bitsize < 0)
3556 unsigned int from_align;
3557 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3558 rtx inner_to_rtx
3559 = change_address (to_rtx, VOIDmode,
3560 plus_constant (XEXP (to_rtx, 0),
3561 bitpos / BITS_PER_UNIT));
3563 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3564 MIN (alignment, from_align));
3565 free_temp_slots ();
3566 pop_temp_slots ();
3567 return to_rtx;
3569 else
3571 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3572 (want_value
3573 /* Spurious cast for HPUX compiler. */
3574 ? ((enum machine_mode)
3575 TYPE_MODE (TREE_TYPE (to)))
3576 : VOIDmode),
3577 unsignedp,
3578 alignment,
3579 int_size_in_bytes (TREE_TYPE (tem)),
3580 get_alias_set (to));
3582 preserve_temp_slots (result);
3583 free_temp_slots ();
3584 pop_temp_slots ();
3586 /* If the value is meaningful, convert RESULT to the proper mode.
3587 Otherwise, return nothing. */
3588 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3589 TYPE_MODE (TREE_TYPE (from)),
3590 result,
3591 TREE_UNSIGNED (TREE_TYPE (to)))
3592 : NULL_RTX);
3596 /* If the rhs is a function call and its value is not an aggregate,
3597 call the function before we start to compute the lhs.
3598 This is needed for correct code for cases such as
3599 val = setjmp (buf) on machines where reference to val
3600 requires loading up part of an address in a separate insn.
3602 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3603 since it might be a promoted variable where the zero- or sign- extension
3604 needs to be done. Handling this in the normal way is safe because no
3605 computation is done before the call. */
3606 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3607 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3608 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3609 && GET_CODE (DECL_RTL (to)) == REG))
3611 rtx value;
3613 push_temp_slots ();
3614 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3615 if (to_rtx == 0)
3616 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3618 /* Handle calls that return values in multiple non-contiguous locations.
3619 The Irix 6 ABI has examples of this. */
3620 if (GET_CODE (to_rtx) == PARALLEL)
3621 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3622 TYPE_ALIGN (TREE_TYPE (from)));
3623 else if (GET_MODE (to_rtx) == BLKmode)
3624 emit_block_move (to_rtx, value, expr_size (from),
3625 TYPE_ALIGN (TREE_TYPE (from)));
3626 else
3628 #ifdef POINTERS_EXTEND_UNSIGNED
3629 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3630 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3631 value = convert_memory_address (GET_MODE (to_rtx), value);
3632 #endif
3633 emit_move_insn (to_rtx, value);
3635 preserve_temp_slots (to_rtx);
3636 free_temp_slots ();
3637 pop_temp_slots ();
3638 return want_value ? to_rtx : NULL_RTX;
3641 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3642 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3644 if (to_rtx == 0)
3646 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3647 if (GET_CODE (to_rtx) == MEM)
3648 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3651 /* Don't move directly into a return register. */
3652 if (TREE_CODE (to) == RESULT_DECL
3653 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3655 rtx temp;
3657 push_temp_slots ();
3658 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3660 if (GET_CODE (to_rtx) == PARALLEL)
3661 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3662 TYPE_ALIGN (TREE_TYPE (from)));
3663 else
3664 emit_move_insn (to_rtx, temp);
3666 preserve_temp_slots (to_rtx);
3667 free_temp_slots ();
3668 pop_temp_slots ();
3669 return want_value ? to_rtx : NULL_RTX;
3672 /* In case we are returning the contents of an object which overlaps
3673 the place the value is being stored, use a safe function when copying
3674 a value through a pointer into a structure value return block. */
3675 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3676 && current_function_returns_struct
3677 && !current_function_returns_pcc_struct)
3679 rtx from_rtx, size;
3681 push_temp_slots ();
3682 size = expr_size (from);
3683 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3684 EXPAND_MEMORY_USE_DONT);
3686 /* Copy the rights of the bitmap. */
3687 if (current_function_check_memory_usage)
3688 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3689 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3690 XEXP (from_rtx, 0), Pmode,
3691 convert_to_mode (TYPE_MODE (sizetype),
3692 size, TREE_UNSIGNED (sizetype)),
3693 TYPE_MODE (sizetype));
3695 #ifdef TARGET_MEM_FUNCTIONS
3696 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3697 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3698 XEXP (from_rtx, 0), Pmode,
3699 convert_to_mode (TYPE_MODE (sizetype),
3700 size, TREE_UNSIGNED (sizetype)),
3701 TYPE_MODE (sizetype));
3702 #else
3703 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3704 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3705 XEXP (to_rtx, 0), Pmode,
3706 convert_to_mode (TYPE_MODE (integer_type_node),
3707 size, TREE_UNSIGNED (integer_type_node)),
3708 TYPE_MODE (integer_type_node));
3709 #endif
3711 preserve_temp_slots (to_rtx);
3712 free_temp_slots ();
3713 pop_temp_slots ();
3714 return want_value ? to_rtx : NULL_RTX;
3717 /* Compute FROM and store the value in the rtx we got. */
3719 push_temp_slots ();
3720 result = store_expr (from, to_rtx, want_value);
3721 preserve_temp_slots (result);
3722 free_temp_slots ();
3723 pop_temp_slots ();
3724 return want_value ? result : NULL_RTX;
3727 /* Generate code for computing expression EXP,
3728 and storing the value into TARGET.
3729 TARGET may contain a QUEUED rtx.
3731 If WANT_VALUE is nonzero, return a copy of the value
3732 not in TARGET, so that we can be sure to use the proper
3733 value in a containing expression even if TARGET has something
3734 else stored in it. If possible, we copy the value through a pseudo
3735 and return that pseudo. Or, if the value is constant, we try to
3736 return the constant. In some cases, we return a pseudo
3737 copied *from* TARGET.
3739 If the mode is BLKmode then we may return TARGET itself.
3740 It turns out that in BLKmode it doesn't cause a problem,
3741 because C has no operators that could combine two different
3742 assignments into the same BLKmode object with different values
3743 with no sequence point. Will other languages need this to
3744 be more thorough?
3746 If WANT_VALUE is 0, we return NULL, to make sure
3747 to catch quickly any cases where the caller uses the value
3748 and fails to set WANT_VALUE. */
3751 store_expr (exp, target, want_value)
3752 register tree exp;
3753 register rtx target;
3754 int want_value;
3756 register rtx temp;
3757 int dont_return_target = 0;
3759 if (TREE_CODE (exp) == COMPOUND_EXPR)
3761 /* Perform first part of compound expression, then assign from second
3762 part. */
3763 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3764 emit_queue ();
3765 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3767 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3769 /* For conditional expression, get safe form of the target. Then
3770 test the condition, doing the appropriate assignment on either
3771 side. This avoids the creation of unnecessary temporaries.
3772 For non-BLKmode, it is more efficient not to do this. */
3774 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3776 emit_queue ();
3777 target = protect_from_queue (target, 1);
3779 do_pending_stack_adjust ();
3780 NO_DEFER_POP;
3781 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3782 start_cleanup_deferral ();
3783 store_expr (TREE_OPERAND (exp, 1), target, 0);
3784 end_cleanup_deferral ();
3785 emit_queue ();
3786 emit_jump_insn (gen_jump (lab2));
3787 emit_barrier ();
3788 emit_label (lab1);
3789 start_cleanup_deferral ();
3790 store_expr (TREE_OPERAND (exp, 2), target, 0);
3791 end_cleanup_deferral ();
3792 emit_queue ();
3793 emit_label (lab2);
3794 OK_DEFER_POP;
3796 return want_value ? target : NULL_RTX;
3798 else if (queued_subexp_p (target))
3799 /* If target contains a postincrement, let's not risk
3800 using it as the place to generate the rhs. */
3802 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3804 /* Expand EXP into a new pseudo. */
3805 temp = gen_reg_rtx (GET_MODE (target));
3806 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3808 else
3809 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3811 /* If target is volatile, ANSI requires accessing the value
3812 *from* the target, if it is accessed. So make that happen.
3813 In no case return the target itself. */
3814 if (! MEM_VOLATILE_P (target) && want_value)
3815 dont_return_target = 1;
3817 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3818 && GET_MODE (target) != BLKmode)
3819 /* If target is in memory and caller wants value in a register instead,
3820 arrange that. Pass TARGET as target for expand_expr so that,
3821 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3822 We know expand_expr will not use the target in that case.
3823 Don't do this if TARGET is volatile because we are supposed
3824 to write it and then read it. */
3826 temp = expand_expr (exp, target, GET_MODE (target), 0);
3827 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3828 temp = copy_to_reg (temp);
3829 dont_return_target = 1;
3831 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3832 /* If this is a scalar in a register that is stored in a wider mode
3833 than the declared mode, compute the result into its declared mode
3834 and then convert to the wider mode. Our value is the computed
3835 expression. */
3837 /* If we don't want a value, we can do the conversion inside EXP,
3838 which will often result in some optimizations. Do the conversion
3839 in two steps: first change the signedness, if needed, then
3840 the extend. But don't do this if the type of EXP is a subtype
3841 of something else since then the conversion might involve
3842 more than just converting modes. */
3843 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3844 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3846 if (TREE_UNSIGNED (TREE_TYPE (exp))
3847 != SUBREG_PROMOTED_UNSIGNED_P (target))
3848 exp
3849 = convert
3850 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3851 TREE_TYPE (exp)),
3852 exp);
3854 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3855 SUBREG_PROMOTED_UNSIGNED_P (target)),
3856 exp);
3859 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3861 /* If TEMP is a volatile MEM and we want a result value, make
3862 the access now so it gets done only once. Likewise if
3863 it contains TARGET. */
3864 if (GET_CODE (temp) == MEM && want_value
3865 && (MEM_VOLATILE_P (temp)
3866 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3867 temp = copy_to_reg (temp);
3869 /* If TEMP is a VOIDmode constant, use convert_modes to make
3870 sure that we properly convert it. */
3871 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3872 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3873 TYPE_MODE (TREE_TYPE (exp)), temp,
3874 SUBREG_PROMOTED_UNSIGNED_P (target));
3876 convert_move (SUBREG_REG (target), temp,
3877 SUBREG_PROMOTED_UNSIGNED_P (target));
3879 /* If we promoted a constant, change the mode back down to match
3880 target. Otherwise, the caller might get confused by a result whose
3881 mode is larger than expected. */
3883 if (want_value && GET_MODE (temp) != GET_MODE (target)
3884 && GET_MODE (temp) != VOIDmode)
3886 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3887 SUBREG_PROMOTED_VAR_P (temp) = 1;
3888 SUBREG_PROMOTED_UNSIGNED_P (temp)
3889 = SUBREG_PROMOTED_UNSIGNED_P (target);
3892 return want_value ? temp : NULL_RTX;
3894 else
3896 temp = expand_expr (exp, target, GET_MODE (target), 0);
3897 /* Return TARGET if it's a specified hardware register.
3898 If TARGET is a volatile mem ref, either return TARGET
3899 or return a reg copied *from* TARGET; ANSI requires this.
3901 Otherwise, if TEMP is not TARGET, return TEMP
3902 if it is constant (for efficiency),
3903 or if we really want the correct value. */
3904 if (!(target && GET_CODE (target) == REG
3905 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3906 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3907 && ! rtx_equal_p (temp, target)
3908 && (CONSTANT_P (temp) || want_value))
3909 dont_return_target = 1;
3912 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3913 the same as that of TARGET, adjust the constant. This is needed, for
3914 example, in case it is a CONST_DOUBLE and we want only a word-sized
3915 value. */
3916 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3917 && TREE_CODE (exp) != ERROR_MARK
3918 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3919 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3920 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3922 if (current_function_check_memory_usage
3923 && GET_CODE (target) == MEM
3924 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3926 in_check_memory_usage = 1;
3927 if (GET_CODE (temp) == MEM)
3928 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3929 VOIDmode, 3, XEXP (target, 0), Pmode,
3930 XEXP (temp, 0), Pmode,
3931 expr_size (exp), TYPE_MODE (sizetype));
3932 else
3933 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3934 VOIDmode, 3, XEXP (target, 0), Pmode,
3935 expr_size (exp), TYPE_MODE (sizetype),
3936 GEN_INT (MEMORY_USE_WO),
3937 TYPE_MODE (integer_type_node));
3938 in_check_memory_usage = 0;
3941 /* If value was not generated in the target, store it there.
3942 Convert the value to TARGET's type first if necessary. */
3943 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3944 one or both of them are volatile memory refs, we have to distinguish
3945 two cases:
3946 - expand_expr has used TARGET. In this case, we must not generate
3947 another copy. This can be detected by TEMP being equal to TARGET
3948 according to == .
3949 - expand_expr has not used TARGET - that means that the source just
3950 happens to have the same RTX form. Since temp will have been created
3951 by expand_expr, it will compare unequal according to == .
3952 We must generate a copy in this case, to reach the correct number
3953 of volatile memory references. */
3955 if ((! rtx_equal_p (temp, target)
3956 || (temp != target && (side_effects_p (temp)
3957 || side_effects_p (target))))
3958 && TREE_CODE (exp) != ERROR_MARK)
3960 target = protect_from_queue (target, 1);
3961 if (GET_MODE (temp) != GET_MODE (target)
3962 && GET_MODE (temp) != VOIDmode)
3964 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3965 if (dont_return_target)
3967 /* In this case, we will return TEMP,
3968 so make sure it has the proper mode.
3969 But don't forget to store the value into TARGET. */
3970 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3971 emit_move_insn (target, temp);
3973 else
3974 convert_move (target, temp, unsignedp);
3977 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3979 /* Handle copying a string constant into an array.
3980 The string constant may be shorter than the array.
3981 So copy just the string's actual length, and clear the rest. */
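/* Editorial example (hypothetical declaration, not in the original
   comment): for

	char buf[16] = "hi";

   the string constant supplies only 3 bytes (including the trailing
   NUL), so those bytes are block-copied and the remaining 13 bytes of
   the array are cleared by the code below.  */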
3982 rtx size;
3983 rtx addr;
3985 /* Get the size of the data type of the string,
3986 which is actually the size of the target. */
3987 size = expr_size (exp);
3988 if (GET_CODE (size) == CONST_INT
3989 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3990 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
3991 else
3993 /* Compute the size of the data to copy from the string. */
3994 tree copy_size
3995 = size_binop (MIN_EXPR,
3996 make_tree (sizetype, size),
3997 size_int (TREE_STRING_LENGTH (exp)));
3998 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
3999 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4000 VOIDmode, 0);
4001 rtx label = 0;
4003 /* Copy that much. */
4004 emit_block_move (target, temp, copy_size_rtx,
4005 TYPE_ALIGN (TREE_TYPE (exp)));
4007 /* Figure out how much is left in TARGET that we have to clear.
4008 Do all calculations in ptr_mode. */
4010 addr = XEXP (target, 0);
4011 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4013 if (GET_CODE (copy_size_rtx) == CONST_INT)
4015 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4016 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4017 align = MIN (align, (BITS_PER_UNIT
4018 * (INTVAL (copy_size_rtx)
4019 & - INTVAL (copy_size_rtx))));
4021 else
4023 addr = force_reg (ptr_mode, addr);
4024 addr = expand_binop (ptr_mode, add_optab, addr,
4025 copy_size_rtx, NULL_RTX, 0,
4026 OPTAB_LIB_WIDEN);
4028 size = expand_binop (ptr_mode, sub_optab, size,
4029 copy_size_rtx, NULL_RTX, 0,
4030 OPTAB_LIB_WIDEN);
4032 align = BITS_PER_UNIT;
4033 label = gen_label_rtx ();
4034 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4035 GET_MODE (size), 0, 0, label);
4037 align = MIN (align, expr_align (copy_size));
4039 if (size != const0_rtx)
4041 rtx dest = gen_rtx_MEM (BLKmode, addr);
4043 MEM_COPY_ATTRIBUTES (dest, target);
4045 /* Be sure we can write on ADDR. */
4046 in_check_memory_usage = 1;
4047 if (current_function_check_memory_usage)
4048 emit_library_call (chkr_check_addr_libfunc,
4049 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4050 addr, Pmode,
4051 size, TYPE_MODE (sizetype),
4052 GEN_INT (MEMORY_USE_WO),
4053 TYPE_MODE (integer_type_node));
4054 in_check_memory_usage = 0;
4055 clear_storage (dest, size, align);
4058 if (label)
4059 emit_label (label);
4062 /* Handle calls that return values in multiple non-contiguous locations.
4063 The Irix 6 ABI has examples of this. */
4064 else if (GET_CODE (target) == PARALLEL)
4065 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4066 TYPE_ALIGN (TREE_TYPE (exp)));
4067 else if (GET_MODE (temp) == BLKmode)
4068 emit_block_move (target, temp, expr_size (exp),
4069 TYPE_ALIGN (TREE_TYPE (exp)));
4070 else
4071 emit_move_insn (target, temp);
4074 /* If we don't want a value, return NULL_RTX. */
4075 if (! want_value)
4076 return NULL_RTX;
4078 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4079 ??? The latter test doesn't seem to make sense. */
4080 else if (dont_return_target && GET_CODE (temp) != MEM)
4081 return temp;
4083 /* Return TARGET itself if it is a hard register. */
4084 else if (want_value && GET_MODE (target) != BLKmode
4085 && ! (GET_CODE (target) == REG
4086 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4087 return copy_to_reg (target);
4089 else
4090 return target;
4093 /* Return 1 if EXP just contains zeros. */
4095 static int
4096 is_zeros_p (exp)
4097 tree exp;
4099 tree elt;
4101 switch (TREE_CODE (exp))
4103 case CONVERT_EXPR:
4104 case NOP_EXPR:
4105 case NON_LVALUE_EXPR:
4106 return is_zeros_p (TREE_OPERAND (exp, 0));
4108 case INTEGER_CST:
4109 return integer_zerop (exp);
4111 case COMPLEX_CST:
4112 return
4113 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4115 case REAL_CST:
4116 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4118 case CONSTRUCTOR:
4119 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4120 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4121 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4122 if (! is_zeros_p (TREE_VALUE (elt)))
4123 return 0;
4125 return 1;
4127 default:
4128 return 0;
4132 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
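/* Editorial example (not in the original comment): under this
   heuristic the initializer { 0, 0, 0, 5 } counts as mostly zero
   (3 of its 4 elements), while { 1, 0, 2, 0 } does not (only 2 of 4).  */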
4134 static int
4135 mostly_zeros_p (exp)
4136 tree exp;
4138 if (TREE_CODE (exp) == CONSTRUCTOR)
4140 int elts = 0, zeros = 0;
4141 tree elt = CONSTRUCTOR_ELTS (exp);
4142 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4144 /* If there are no ranges of true bits, it is all zero. */
4145 return elt == NULL_TREE;
4147 for (; elt; elt = TREE_CHAIN (elt))
4149 /* We do not handle the case where the index is a RANGE_EXPR,
4150 so the statistic will be somewhat inaccurate.
4151 We do make a more accurate count in store_constructor itself,
4152 so, since this function is only used for nested array elements,
4153 this should be close enough. */
4154 if (mostly_zeros_p (TREE_VALUE (elt)))
4155 zeros++;
4156 elts++;
4159 return 4 * zeros >= 3 * elts;
4162 return is_zeros_p (exp);
4165 /* Helper function for store_constructor.
4166 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4167 TYPE is the type of the CONSTRUCTOR, not the element type.
4168 ALIGN and CLEARED are as for store_constructor.
4169 ALIAS_SET is the alias set to use for any stores.
4171 This provides a recursive shortcut back to store_constructor when it isn't
4172 necessary to go through store_field. This is so that we can pass through
4173 the cleared field to let store_constructor know that we may not have to
4174 clear a substructure if the outer structure has already been cleared. */
4176 static void
4177 store_constructor_field (target, bitsize, bitpos,
4178 mode, exp, type, align, cleared, alias_set)
4179 rtx target;
4180 unsigned HOST_WIDE_INT bitsize;
4181 HOST_WIDE_INT bitpos;
4182 enum machine_mode mode;
4183 tree exp, type;
4184 unsigned int align;
4185 int cleared;
4186 int alias_set;
4188 if (TREE_CODE (exp) == CONSTRUCTOR
4189 && bitpos % BITS_PER_UNIT == 0
4190 /* If we have a non-zero bitpos for a register target, then we just
4191 let store_field do the bitfield handling. This is unlikely to
4192 generate unnecessary clear instructions anyway. */
4193 && (bitpos == 0 || GET_CODE (target) == MEM))
4195 if (bitpos != 0)
4196 target
4197 = change_address (target,
4198 GET_MODE (target) == BLKmode
4199 || 0 != (bitpos
4200 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4201 ? BLKmode : VOIDmode,
4202 plus_constant (XEXP (target, 0),
4203 bitpos / BITS_PER_UNIT));
4205 if (GET_CODE (target) == MEM)
4206 MEM_ALIAS_SET (target) = alias_set;
4207 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4209 else
4210 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4211 int_size_in_bytes (type), alias_set);
4214 /* Store the value of constructor EXP into the rtx TARGET.
4215 TARGET is either a REG or a MEM.
4216 ALIGN is the maximum known alignment for TARGET.
4217 CLEARED is true if TARGET is known to have been zeroed.
4218 SIZE is the number of bytes of TARGET we are allowed to modify: this
4219 may not be the same as the size of EXP if we are assigning to a field
4220 which has been packed to exclude padding bits. */
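/* Editorial illustration (assumed C source, not from the original
   comment): for a mostly-zero initializer such as

	struct S { int a, b, c, d; } s = { 0, 0, 0, 1 };

   the code below clears the whole object with clear_storage first and
   then stores only the nonzero fields, instead of emitting one store
   per field.  */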
4222 static void
4223 store_constructor (exp, target, align, cleared, size)
4224 tree exp;
4225 rtx target;
4226 unsigned int align;
4227 int cleared;
4228 HOST_WIDE_INT size;
4230 tree type = TREE_TYPE (exp);
4231 #ifdef WORD_REGISTER_OPERATIONS
4232 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4233 #endif
4235 /* We know our target cannot conflict, since safe_from_p has been called. */
4236 #if 0
4237 /* Don't try copying piece by piece into a hard register
4238 since that is vulnerable to being clobbered by EXP.
4239 Instead, construct in a pseudo register and then copy it all. */
4240 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4242 rtx temp = gen_reg_rtx (GET_MODE (target));
4243 store_constructor (exp, temp, align, cleared, size);
4244 emit_move_insn (target, temp);
4245 return;
4247 #endif
4249 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4250 || TREE_CODE (type) == QUAL_UNION_TYPE)
4252 register tree elt;
4254 /* Inform later passes that the whole union value is dead. */
4255 if ((TREE_CODE (type) == UNION_TYPE
4256 || TREE_CODE (type) == QUAL_UNION_TYPE)
4257 && ! cleared)
4259 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4261 /* If the constructor is empty, clear the union. */
4262 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4263 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4266 /* If we are building a static constructor into a register,
4267 set the initial value as zero so we can fold the value into
4268 a constant. But if more than one register is involved,
4269 this probably loses. */
4270 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4271 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4273 if (! cleared)
4274 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4276 cleared = 1;
4279 /* If the constructor has fewer fields than the structure
4280 or if we are initializing the structure to mostly zeros,
4281 clear the whole structure first. Don't do this if TARGET is a
4282 register whose mode size isn't equal to SIZE, since clear_storage
4283 can't handle this case. */
4284 else if (size > 0
4285 && ((list_length (CONSTRUCTOR_ELTS (exp))
4286 != fields_length (type))
4287 || mostly_zeros_p (exp))
4288 && (GET_CODE (target) != REG
4289 || GET_MODE_SIZE (GET_MODE (target)) == size))
4291 if (! cleared)
4292 clear_storage (target, GEN_INT (size), align);
4294 cleared = 1;
4296 else if (! cleared)
4297 /* Inform later passes that the old value is dead. */
4298 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4300 /* Store each element of the constructor into
4301 the corresponding field of TARGET. */
4303 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4305 register tree field = TREE_PURPOSE (elt);
4306 #ifdef WORD_REGISTER_OPERATIONS
4307 tree value = TREE_VALUE (elt);
4308 #endif
4309 register enum machine_mode mode;
4310 HOST_WIDE_INT bitsize;
4311 HOST_WIDE_INT bitpos = 0;
4312 int unsignedp;
4313 tree offset;
4314 rtx to_rtx = target;
4316 /* Just ignore missing fields.
4317 We cleared the whole structure, above,
4318 if any fields are missing. */
4319 if (field == 0)
4320 continue;
4322 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4323 continue;
4325 if (host_integerp (DECL_SIZE (field), 1))
4326 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4327 else
4328 bitsize = -1;
4330 unsignedp = TREE_UNSIGNED (field);
4331 mode = DECL_MODE (field);
4332 if (DECL_BIT_FIELD (field))
4333 mode = VOIDmode;
4335 offset = DECL_FIELD_OFFSET (field);
4336 if (host_integerp (offset, 0)
4337 && host_integerp (bit_position (field), 0))
4339 bitpos = int_bit_position (field);
4340 offset = 0;
4342 else
4343 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4345 if (offset)
4347 rtx offset_rtx;
4349 if (contains_placeholder_p (offset))
4350 offset = build (WITH_RECORD_EXPR, sizetype,
4351 offset, make_tree (TREE_TYPE (exp), target));
4353 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4354 if (GET_CODE (to_rtx) != MEM)
4355 abort ();
4357 if (GET_MODE (offset_rtx) != ptr_mode)
4359 #ifdef POINTERS_EXTEND_UNSIGNED
4360 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4361 #else
4362 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4363 #endif
4366 to_rtx
4367 = change_address (to_rtx, VOIDmode,
4368 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4369 force_reg (ptr_mode,
4370 offset_rtx)));
4371 align = DECL_OFFSET_ALIGN (field);
4374 if (TREE_READONLY (field))
4376 if (GET_CODE (to_rtx) == MEM)
4377 to_rtx = copy_rtx (to_rtx);
4379 RTX_UNCHANGING_P (to_rtx) = 1;
4382 #ifdef WORD_REGISTER_OPERATIONS
4383 /* If this initializes a field that is smaller than a word, at the
4384 start of a word, try to widen it to a full word.
4385 This special case allows us to output C++ member function
4386 initializations in a form that the optimizers can understand. */
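/* Editorial example (hypothetical layout, not from the original
   comment): if TARGET is a REG and the constructor stores a constant
   into a `short' field that starts on a word boundary, the constant is
   widened to word_mode here so later passes see a full-word store
   rather than a bit-field insertion.  */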
4387 if (GET_CODE (target) == REG
4388 && bitsize < BITS_PER_WORD
4389 && bitpos % BITS_PER_WORD == 0
4390 && GET_MODE_CLASS (mode) == MODE_INT
4391 && TREE_CODE (value) == INTEGER_CST
4392 && exp_size >= 0
4393 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4395 tree type = TREE_TYPE (value);
4396 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4398 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4399 value = convert (type, value);
4401 if (BYTES_BIG_ENDIAN)
4402 value
4403 = fold (build (LSHIFT_EXPR, type, value,
4404 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4405 bitsize = BITS_PER_WORD;
4406 mode = word_mode;
4408 #endif
4409 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4410 TREE_VALUE (elt), type, align, cleared,
4411 DECL_NONADDRESSABLE_P (field)
4412 ? MEM_ALIAS_SET (to_rtx)
4413 : get_alias_set (TREE_TYPE (field)));
4416 else if (TREE_CODE (type) == ARRAY_TYPE)
4418 register tree elt;
4419 register int i;
4420 int need_to_clear;
4421 tree domain = TYPE_DOMAIN (type);
4422 tree elttype = TREE_TYPE (type);
4423 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4424 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4425 HOST_WIDE_INT minelt;
4426 HOST_WIDE_INT maxelt;
4428 /* If we have constant bounds for the range of the type, get them. */
4429 if (const_bounds_p)
4431 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4432 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4435 /* If the constructor has fewer elements than the array,
4436 clear the whole array first. Similarly if this is a
4437 static constructor of a non-BLKmode object. */
4438 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4439 need_to_clear = 1;
4440 else
4442 HOST_WIDE_INT count = 0, zero_count = 0;
4443 need_to_clear = ! const_bounds_p;
4445 /* This loop is a more accurate version of the loop in
4446 mostly_zeros_p (it handles RANGE_EXPR in an index).
4447 It is also needed to check for missing elements. */
4448 for (elt = CONSTRUCTOR_ELTS (exp);
4449 elt != NULL_TREE && ! need_to_clear;
4450 elt = TREE_CHAIN (elt))
4452 tree index = TREE_PURPOSE (elt);
4453 HOST_WIDE_INT this_node_count;
4455 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4457 tree lo_index = TREE_OPERAND (index, 0);
4458 tree hi_index = TREE_OPERAND (index, 1);
4460 if (! host_integerp (lo_index, 1)
4461 || ! host_integerp (hi_index, 1))
4463 need_to_clear = 1;
4464 break;
4467 this_node_count = (tree_low_cst (hi_index, 1)
4468 - tree_low_cst (lo_index, 1) + 1);
4470 else
4471 this_node_count = 1;
4473 count += this_node_count;
4474 if (mostly_zeros_p (TREE_VALUE (elt)))
4475 zero_count += this_node_count;
4478 /* Clear the entire array first if there are any missing elements,
4479 or if the incidence of zero elements is >= 75%. */
4480 if (! need_to_clear
4481 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4482 need_to_clear = 1;
4485 if (need_to_clear && size > 0)
4487 if (! cleared)
4488 clear_storage (target, GEN_INT (size), align);
4489 cleared = 1;
4491 else
4492 /* Inform later passes that the old value is dead. */
4493 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4495 /* Store each element of the constructor into
4496 the corresponding element of TARGET, determined
4497 by counting the elements. */
4498 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4499 elt;
4500 elt = TREE_CHAIN (elt), i++)
4502 register enum machine_mode mode;
4503 HOST_WIDE_INT bitsize;
4504 HOST_WIDE_INT bitpos;
4505 int unsignedp;
4506 tree value = TREE_VALUE (elt);
4507 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4508 tree index = TREE_PURPOSE (elt);
4509 rtx xtarget = target;
4511 if (cleared && is_zeros_p (value))
4512 continue;
4514 unsignedp = TREE_UNSIGNED (elttype);
4515 mode = TYPE_MODE (elttype);
4516 if (mode == BLKmode)
4517 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4518 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4519 : -1);
4520 else
4521 bitsize = GET_MODE_BITSIZE (mode);
4523 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4525 tree lo_index = TREE_OPERAND (index, 0);
4526 tree hi_index = TREE_OPERAND (index, 1);
4527 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4528 struct nesting *loop;
4529 HOST_WIDE_INT lo, hi, count;
4530 tree position;
4532 /* If the range is constant and "small", unroll the loop. */
4533 if (const_bounds_p
4534 && host_integerp (lo_index, 0)
4535 && host_integerp (hi_index, 0)
4536 && (lo = tree_low_cst (lo_index, 0),
4537 hi = tree_low_cst (hi_index, 0),
4538 count = hi - lo + 1,
4539 (GET_CODE (target) != MEM
4540 || count <= 2
4541 || (host_integerp (TYPE_SIZE (elttype), 1)
4542 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4543 <= 40 * 8)))))
4545 lo -= minelt; hi -= minelt;
4546 for (; lo <= hi; lo++)
4548 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4549 store_constructor_field
4550 (target, bitsize, bitpos, mode, value, type, align,
4551 cleared,
4552 TYPE_NONALIASED_COMPONENT (type)
4553 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4556 else
4558 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4559 loop_top = gen_label_rtx ();
4560 loop_end = gen_label_rtx ();
4562 unsignedp = TREE_UNSIGNED (domain);
4564 index = build_decl (VAR_DECL, NULL_TREE, domain);
4566 DECL_RTL (index) = index_r
4567 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4568 &unsignedp, 0));
4570 if (TREE_CODE (value) == SAVE_EXPR
4571 && SAVE_EXPR_RTL (value) == 0)
4573 /* Make sure value gets expanded once before the
4574 loop. */
4575 expand_expr (value, const0_rtx, VOIDmode, 0);
4576 emit_queue ();
4578 store_expr (lo_index, index_r, 0);
4579 loop = expand_start_loop (0);
4581 /* Assign value to element index. */
4582 position
4583 = convert (ssizetype,
4584 fold (build (MINUS_EXPR, TREE_TYPE (index),
4585 index, TYPE_MIN_VALUE (domain))));
4586 position = size_binop (MULT_EXPR, position,
4587 convert (ssizetype,
4588 TYPE_SIZE_UNIT (elttype)));
4590 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4591 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4592 xtarget = change_address (target, mode, addr);
4593 if (TREE_CODE (value) == CONSTRUCTOR)
4594 store_constructor (value, xtarget, align, cleared,
4595 bitsize / BITS_PER_UNIT);
4596 else
4597 store_expr (value, xtarget, 0);
4599 expand_exit_loop_if_false (loop,
4600 build (LT_EXPR, integer_type_node,
4601 index, hi_index));
4603 expand_increment (build (PREINCREMENT_EXPR,
4604 TREE_TYPE (index),
4605 index, integer_one_node), 0, 0);
4606 expand_end_loop ();
4607 emit_label (loop_end);
4610 else if ((index != 0 && ! host_integerp (index, 0))
4611 || ! host_integerp (TYPE_SIZE (elttype), 1))
4613 rtx pos_rtx, addr;
4614 tree position;
4616 if (index == 0)
4617 index = ssize_int (1);
4619 if (minelt)
4620 index = convert (ssizetype,
4621 fold (build (MINUS_EXPR, index,
4622 TYPE_MIN_VALUE (domain))));
4624 position = size_binop (MULT_EXPR, index,
4625 convert (ssizetype,
4626 TYPE_SIZE_UNIT (elttype)));
4627 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4628 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4629 xtarget = change_address (target, mode, addr);
4630 store_expr (value, xtarget, 0);
4632 else
4634 if (index != 0)
4635 bitpos = ((tree_low_cst (index, 0) - minelt)
4636 * tree_low_cst (TYPE_SIZE (elttype), 1));
4637 else
4638 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4640 store_constructor_field (target, bitsize, bitpos, mode, value,
4641 type, align, cleared,
4642 TYPE_NONALIASED_COMPONENT (type)
4643 ? MEM_ALIAS_SET (target) :
4644 get_alias_set (elttype));
4650 /* Set constructor assignments. */
4651 else if (TREE_CODE (type) == SET_TYPE)
4653 tree elt = CONSTRUCTOR_ELTS (exp);
4654 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4655 tree domain = TYPE_DOMAIN (type);
4656 tree domain_min, domain_max, bitlength;
4658 /* The default implementation strategy is to extract the constant
4659 parts of the constructor, use that to initialize the target,
4660 and then "or" in whatever non-constant ranges we need in addition.
4662 If a large set is all zero or all ones, it is
4663 probably better to set it using memset (if available) or bzero.
4664 Also, if a large set has just a single range, it may be
4665 better to first clear the whole set (using
4666 bzero/memset) and then set the bits we want. */
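/* Editorial illustration (assumed Pascal-style set front end, not from
   the original comment): for an initializer like `[0..2, lo..hi]' the
   constant part {0,1,2} is emitted below as immediate word stores,
   while the variable range lo..hi falls through to the per-range loop
   that calls memset or __setbits.  */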
4668 /* Check for all zeros. */
4669 if (elt == NULL_TREE && size > 0)
4671 if (!cleared)
4672 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4673 return;
4676 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4677 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4678 bitlength = size_binop (PLUS_EXPR,
4679 size_diffop (domain_max, domain_min),
4680 ssize_int (1));
4682 nbits = tree_low_cst (bitlength, 1);
4684 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4685 are "complicated" (more than one range), initialize (the
4686 constant parts) by copying from a constant. */
4687 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4688 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4690 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4691 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4692 char *bit_buffer = (char *) alloca (nbits);
4693 HOST_WIDE_INT word = 0;
4694 unsigned int bit_pos = 0;
4695 unsigned int ibit = 0;
4696 unsigned int offset = 0; /* In bytes from beginning of set. */
4698 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4699 for (;;)
4701 if (bit_buffer[ibit])
4703 if (BYTES_BIG_ENDIAN)
4704 word |= (1 << (set_word_size - 1 - bit_pos));
4705 else
4706 word |= 1 << bit_pos;
4709 bit_pos++; ibit++;
4710 if (bit_pos >= set_word_size || ibit == nbits)
4712 if (word != 0 || ! cleared)
4714 rtx datum = GEN_INT (word);
4715 rtx to_rtx;
4717 /* The assumption here is that it is safe to use
4718 XEXP if the set is multi-word, but not if
4719 it's single-word. */
4720 if (GET_CODE (target) == MEM)
4722 to_rtx = plus_constant (XEXP (target, 0), offset);
4723 to_rtx = change_address (target, mode, to_rtx);
4725 else if (offset == 0)
4726 to_rtx = target;
4727 else
4728 abort ();
4729 emit_move_insn (to_rtx, datum);
4732 if (ibit == nbits)
4733 break;
4734 word = 0;
4735 bit_pos = 0;
4736 offset += set_word_size / BITS_PER_UNIT;
4740 else if (!cleared)
4741 /* Don't bother clearing storage if the set is all ones. */
4742 if (TREE_CHAIN (elt) != NULL_TREE
4743 || (TREE_PURPOSE (elt) == NULL_TREE
4744 ? nbits != 1
4745 : ( ! host_integerp (TREE_VALUE (elt), 0)
4746 || ! host_integerp (TREE_PURPOSE (elt), 0)
4747 || (tree_low_cst (TREE_VALUE (elt), 0)
4748 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4749 != (HOST_WIDE_INT) nbits))))
4750 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4752 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4754 /* Start of range of element or NULL. */
4755 tree startbit = TREE_PURPOSE (elt);
4756 /* End of range of element, or element value. */
4757 tree endbit = TREE_VALUE (elt);
4758 #ifdef TARGET_MEM_FUNCTIONS
4759 HOST_WIDE_INT startb, endb;
4760 #endif
4761 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4763 bitlength_rtx = expand_expr (bitlength,
4764 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4766 /* Handle non-range tuple element like [ expr ]. */
4767 if (startbit == NULL_TREE)
4769 startbit = save_expr (endbit);
4770 endbit = startbit;
4773 startbit = convert (sizetype, startbit);
4774 endbit = convert (sizetype, endbit);
4775 if (! integer_zerop (domain_min))
4777 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4778 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4780 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4781 EXPAND_CONST_ADDRESS);
4782 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4783 EXPAND_CONST_ADDRESS);
4785 if (REG_P (target))
4787 targetx = assign_stack_temp (GET_MODE (target),
4788 GET_MODE_SIZE (GET_MODE (target)),
4790 emit_move_insn (targetx, target);
4793 else if (GET_CODE (target) == MEM)
4794 targetx = target;
4795 else
4796 abort ();
4798 #ifdef TARGET_MEM_FUNCTIONS
4799 /* Optimization: If startbit and endbit are
4800 constants divisible by BITS_PER_UNIT,
4801 call memset instead. */
4802 if (TREE_CODE (startbit) == INTEGER_CST
4803 && TREE_CODE (endbit) == INTEGER_CST
4804 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4805 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4807 emit_library_call (memset_libfunc, LCT_NORMAL,
4808 VOIDmode, 3,
4809 plus_constant (XEXP (targetx, 0),
4810 startb / BITS_PER_UNIT),
4811 Pmode,
4812 constm1_rtx, TYPE_MODE (integer_type_node),
4813 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4814 TYPE_MODE (sizetype));
4816 else
4817 #endif
4818 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4819 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4820 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4821 startbit_rtx, TYPE_MODE (sizetype),
4822 endbit_rtx, TYPE_MODE (sizetype));
4824 if (REG_P (target))
4825 emit_move_insn (target, targetx);
4829 else
4830 abort ();
4833 /* Store the value of EXP (an expression tree)
4834 into a subfield of TARGET which has mode MODE and occupies
4835 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4836 If MODE is VOIDmode, it means that we are storing into a bit-field.
4838 If VALUE_MODE is VOIDmode, return nothing in particular.
4839 UNSIGNEDP is not used in this case.
4841 Otherwise, return an rtx for the value stored. This rtx
4842 has mode VALUE_MODE if that is convenient to do.
4843 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4845 ALIGN is the alignment that TARGET is known to have.
4846 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4848 ALIAS_SET is the alias set for the destination. This value will
4849 (in general) be different from that for TARGET, since TARGET is a
4850 reference to the containing structure. */
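/* Editorial example (assumed C source, not in the original comment):
   for

	struct { unsigned x : 5; } s;
	s.x = v;

   this routine is reached with BITSIZE == 5, BITPOS == 0 and MODE ==
   VOIDmode, so the value is inserted with store_bit_field rather than
   by an ordinary memory store.  */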
4852 static rtx
4853 store_field (target, bitsize, bitpos, mode, exp, value_mode,
4854 unsignedp, align, total_size, alias_set)
4855 rtx target;
4856 HOST_WIDE_INT bitsize;
4857 HOST_WIDE_INT bitpos;
4858 enum machine_mode mode;
4859 tree exp;
4860 enum machine_mode value_mode;
4861 int unsignedp;
4862 unsigned int align;
4863 HOST_WIDE_INT total_size;
4864 int alias_set;
4866 HOST_WIDE_INT width_mask = 0;
4868 if (TREE_CODE (exp) == ERROR_MARK)
4869 return const0_rtx;
4871 if (bitsize < HOST_BITS_PER_WIDE_INT)
4872 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4874 /* If we are storing into an unaligned field of an aligned union that is
4875 in a register, we may have the mode of TARGET being an integer mode but
4876 MODE == BLKmode. In that case, get an aligned object whose size and
4877 alignment are the same as TARGET and store TARGET into it (we can avoid
4878 the store if the field being stored is the entire width of TARGET). Then
4879 call ourselves recursively to store the field into a BLKmode version of
4880 that object. Finally, load from the object into TARGET. This is not
4881 very efficient in general, but should only be slightly more expensive
4882 than the otherwise-required unaligned accesses. Perhaps this can be
4883 cleaned up later. */
4885 if (mode == BLKmode
4886 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4888 rtx object = assign_stack_temp (GET_MODE (target),
4889 GET_MODE_SIZE (GET_MODE (target)), 0);
4890 rtx blk_object = copy_rtx (object);
4892 MEM_SET_IN_STRUCT_P (object, 1);
4893 MEM_SET_IN_STRUCT_P (blk_object, 1);
4894 PUT_MODE (blk_object, BLKmode);
4896 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4897 emit_move_insn (object, target);
4899 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4900 align, total_size, alias_set);
4902 /* Even though we aren't returning target, we need to
4903 give it the updated value. */
4904 emit_move_insn (target, object);
4906 return blk_object;
4909 if (GET_CODE (target) == CONCAT)
4911 /* We're storing into a struct containing a single __complex. */
4913 if (bitpos != 0)
4914 abort ();
4915 return store_expr (exp, target, 0);
4918 /* If the structure is in a register or if the component
4919 is a bit field, we cannot use addressing to access it.
4920 Use bit-field techniques or SUBREG to store in it. */
4922 if (mode == VOIDmode
4923 || (mode != BLKmode && ! direct_store[(int) mode]
4924 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4925 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4926 || GET_CODE (target) == REG
4927 || GET_CODE (target) == SUBREG
4928 /* If the field isn't aligned enough to store as an ordinary memref,
4929 store it as a bit field. */
4930 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4931 && (align < GET_MODE_ALIGNMENT (mode)
4932 || bitpos % GET_MODE_ALIGNMENT (mode)))
4933 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4934 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
4935 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4936 /* If the RHS and field are a constant size and the size of the
4937 RHS isn't the same size as the bitfield, we must use bitfield
4938 operations. */
4939 || (bitsize >= 0
4940 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4941 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
4943 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4945 /* If BITSIZE is narrower than the size of the type of EXP
4946 we will be narrowing TEMP. Normally, what's wanted are the
4947 low-order bits. However, if EXP's type is a record and this is a
4948 big-endian machine, we want the upper BITSIZE bits. */
4949 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4950 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4951 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4952 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4953 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4954 - bitsize),
4955 temp, 1);
4957 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4958 MODE. */
4959 if (mode != VOIDmode && mode != BLKmode
4960 && mode != TYPE_MODE (TREE_TYPE (exp)))
4961 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4963 /* If the modes of TARGET and TEMP are both BLKmode, both
4964 must be in memory and BITPOS must be aligned on a byte
4965 boundary. If so, we simply do a block copy. */
4966 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4968 unsigned int exp_align = expr_align (exp);
4970 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4971 || bitpos % BITS_PER_UNIT != 0)
4972 abort ();
4974 target = change_address (target, VOIDmode,
4975 plus_constant (XEXP (target, 0),
4976 bitpos / BITS_PER_UNIT));
4978 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4979 align = MIN (exp_align, align);
4981 /* Find an alignment that is consistent with the bit position. */
4982 while ((bitpos % align) != 0)
4983 align >>= 1;
4985 emit_block_move (target, temp,
4986 bitsize == -1 ? expr_size (exp)
4987 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4988 / BITS_PER_UNIT),
4989 align);
4991 return value_mode == VOIDmode ? const0_rtx : target;
4994 /* Store the value in the bitfield. */
4995 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4996 if (value_mode != VOIDmode)
4998 /* The caller wants an rtx for the value. */
4999 /* If possible, avoid refetching from the bitfield itself. */
5000 if (width_mask != 0
5001 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5003 tree count;
5004 enum machine_mode tmode;
5006 if (unsignedp)
5007 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5008 tmode = GET_MODE (temp);
5009 if (tmode == VOIDmode)
5010 tmode = value_mode;
5011 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5012 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5013 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5015 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5016 NULL_RTX, value_mode, 0, align,
5017 total_size);
5019 return const0_rtx;
5021 else
5023 rtx addr = XEXP (target, 0);
5024 rtx to_rtx;
5026 /* If a value is wanted, it must be the lhs;
5027 so make the address stable for multiple use. */
5029 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5030 && ! CONSTANT_ADDRESS_P (addr)
5031 /* A frame-pointer reference is already stable. */
5032 && ! (GET_CODE (addr) == PLUS
5033 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5034 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5035 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5036 addr = copy_to_reg (addr);
5038 /* Now build a reference to just the desired component. */
5040 to_rtx = copy_rtx (change_address (target, mode,
5041 plus_constant (addr,
5042 (bitpos
5043 / BITS_PER_UNIT))));
5044 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5045 MEM_ALIAS_SET (to_rtx) = alias_set;
5047 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5051 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5052 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
5053 ARRAY_REFs and find the ultimate containing object, which we return.
5055 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5056 bit position, and *PUNSIGNEDP to the signedness of the field.
5057 If the position of the field is variable, we store a tree
5058 giving the variable offset (in units) in *POFFSET.
5059 This offset is in addition to the bit position.
5060 If the position is not variable, we store 0 in *POFFSET.
5061 We set *PALIGNMENT to the alignment of the address that will be
5062 computed. This is the alignment of the thing we return if *POFFSET
5063 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5065 If any of the extraction expressions is volatile,
5066 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5068 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5069 is a mode that can be used to access the field. In that case, *PBITSIZE
5070 is redundant.
5072 If the field describes a variable-sized object, *PMODE is set to
5073 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5074 this case, but the address of the object can be found. */
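/* Editorial example (hypothetical source, not in the original
   comment): for a reference such as `a.b[i].c' this routine walks the
   nested COMPONENT_REFs and the ARRAY_REF and returns the innermost
   object `a', with the constant part of the displacement accumulated
   in *PBITPOS and the variable part (the term scaled by `i') returned
   as a tree in *POFFSET.  */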
5076 tree
5077 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5078 punsignedp, pvolatilep, palignment)
5079 tree exp;
5080 HOST_WIDE_INT *pbitsize;
5081 HOST_WIDE_INT *pbitpos;
5082 tree *poffset;
5083 enum machine_mode *pmode;
5084 int *punsignedp;
5085 int *pvolatilep;
5086 unsigned int *palignment;
5088 tree size_tree = 0;
5089 enum machine_mode mode = VOIDmode;
5090 tree offset = size_zero_node;
5091 tree bit_offset = bitsize_zero_node;
5092 unsigned int alignment = BIGGEST_ALIGNMENT;
5093 tree tem;
5095 /* First get the mode, signedness, and size. We do this from just the
5096 outermost expression. */
5097 if (TREE_CODE (exp) == COMPONENT_REF)
5099 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5100 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5101 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5103 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5105 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5107 size_tree = TREE_OPERAND (exp, 1);
5108 *punsignedp = TREE_UNSIGNED (exp);
5110 else
5112 mode = TYPE_MODE (TREE_TYPE (exp));
5113 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5115 if (mode == BLKmode)
5116 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5117 else
5118 *pbitsize = GET_MODE_BITSIZE (mode);
5121 if (size_tree != 0)
5123 if (! host_integerp (size_tree, 1))
5124 mode = BLKmode, *pbitsize = -1;
5125 else
5126 *pbitsize = tree_low_cst (size_tree, 1);
5129 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5130 and find the ultimate containing object. */
5131 while (1)
5133 if (TREE_CODE (exp) == BIT_FIELD_REF)
5134 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5135 else if (TREE_CODE (exp) == COMPONENT_REF)
5137 tree field = TREE_OPERAND (exp, 1);
5138 tree this_offset = DECL_FIELD_OFFSET (field);
5140 /* If this field hasn't been filled in yet, don't go
5141 past it. This should only happen when folding expressions
5142 made during type construction. */
5143 if (this_offset == 0)
5144 break;
5145 else if (! TREE_CONSTANT (this_offset)
5146 && contains_placeholder_p (this_offset))
5147 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5149 offset = size_binop (PLUS_EXPR, offset, this_offset);
5150 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5151 DECL_FIELD_BIT_OFFSET (field));
5153 if (! host_integerp (offset, 0))
5154 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5157 else if (TREE_CODE (exp) == ARRAY_REF)
5159 tree index = TREE_OPERAND (exp, 1);
5160 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5161 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5162 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
5164 /* We assume all arrays have sizes that are a multiple of a byte.
5165 First subtract the lower bound, if any, in the type of the
5166 index, then convert to sizetype and multiply by the size of the
5167 array element. */
5168 if (low_bound != 0 && ! integer_zerop (low_bound))
5169 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5170 index, low_bound));
5172 /* If the index has a self-referential type, pass it to a
5173 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5174 component to one. */
5175 if (! TREE_CONSTANT (index)
5176 && contains_placeholder_p (index))
5177 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5178 if (! TREE_CONSTANT (unit_size)
5179 && contains_placeholder_p (unit_size))
5180 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5181 TREE_OPERAND (exp, 0));
5183 offset = size_binop (PLUS_EXPR, offset,
5184 size_binop (MULT_EXPR,
5185 convert (sizetype, index),
5186 unit_size));
5189 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5190 && ! ((TREE_CODE (exp) == NOP_EXPR
5191 || TREE_CODE (exp) == CONVERT_EXPR)
5192 && (TYPE_MODE (TREE_TYPE (exp))
5193 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5194 break;
5196 /* If any reference in the chain is volatile, the effect is volatile. */
5197 if (TREE_THIS_VOLATILE (exp))
5198 *pvolatilep = 1;
5200 /* If the offset is non-constant already, then we can't assume any
5201 alignment more than the alignment here. */
5202 if (! TREE_CONSTANT (offset))
5203 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5205 exp = TREE_OPERAND (exp, 0);
5208 if (DECL_P (exp))
5209 alignment = MIN (alignment, DECL_ALIGN (exp));
5210 else if (TREE_TYPE (exp) != 0)
5211 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5213 /* If OFFSET is constant, see if we can return the whole thing as a
5214 constant bit position. Otherwise, split it up. */
5215 if (host_integerp (offset, 0)
5216 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5217 bitsize_unit_node))
5218 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5219 && host_integerp (tem, 0))
5220 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5221 else
5222 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5224 *pmode = mode;
5225 *palignment = alignment;
5226 return exp;
5229 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5231 static enum memory_use_mode
5232 get_memory_usage_from_modifier (modifier)
5233 enum expand_modifier modifier;
5235 switch (modifier)
5237 case EXPAND_NORMAL:
5238 case EXPAND_SUM:
5239 return MEMORY_USE_RO;
5240 break;
5241 case EXPAND_MEMORY_USE_WO:
5242 return MEMORY_USE_WO;
5243 break;
5244 case EXPAND_MEMORY_USE_RW:
5245 return MEMORY_USE_RW;
5246 break;
5247 case EXPAND_MEMORY_USE_DONT:
5248 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5249 MEMORY_USE_DONT, because they are modifiers to a call of
5250 expand_expr in the ADDR_EXPR case of expand_expr. */
5251 case EXPAND_CONST_ADDRESS:
5252 case EXPAND_INITIALIZER:
5253 return MEMORY_USE_DONT;
5254 case EXPAND_MEMORY_USE_BAD:
5255 default:
5256 abort ();
5260 /* Given an rtx VALUE that may contain additions and multiplications,
5261 return an equivalent value that just refers to a register or memory.
5262 This is done by generating instructions to perform the arithmetic
5263 and returning a pseudo-register containing the value.
5265 The returned value may be a REG, SUBREG, MEM or constant. */
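/* Editorial example (illustration only): given VALUE of the form
   (plus (reg X) (const_int 4)), the code below emits an add and
   returns a register holding the sum; a bare register, memory
   reference or constant is returned unchanged.  */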
5268 force_operand (value, target)
5269 rtx value, target;
5271 register optab binoptab = 0;
5272 /* Use a temporary to force order of execution of calls to
5273 `force_operand'. */
5274 rtx tmp;
5275 register rtx op2;
5276 /* Use subtarget as the target for operand 0 of a binary operation. */
5277 register rtx subtarget = get_subtarget (target);
5279 /* Check for a PIC address load. */
5280 if (flag_pic
5281 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5282 && XEXP (value, 0) == pic_offset_table_rtx
5283 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5284 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5285 || GET_CODE (XEXP (value, 1)) == CONST))
5287 if (!subtarget)
5288 subtarget = gen_reg_rtx (GET_MODE (value));
5289 emit_move_insn (subtarget, value);
5290 return subtarget;
5293 if (GET_CODE (value) == PLUS)
5294 binoptab = add_optab;
5295 else if (GET_CODE (value) == MINUS)
5296 binoptab = sub_optab;
5297 else if (GET_CODE (value) == MULT)
5299 op2 = XEXP (value, 1);
5300 if (!CONSTANT_P (op2)
5301 && !(GET_CODE (op2) == REG && op2 != subtarget))
5302 subtarget = 0;
5303 tmp = force_operand (XEXP (value, 0), subtarget);
5304 return expand_mult (GET_MODE (value), tmp,
5305 force_operand (op2, NULL_RTX),
5306 target, 1);
5309 if (binoptab)
5311 op2 = XEXP (value, 1);
5312 if (!CONSTANT_P (op2)
5313 && !(GET_CODE (op2) == REG && op2 != subtarget))
5314 subtarget = 0;
5315 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5317 binoptab = add_optab;
5318 op2 = negate_rtx (GET_MODE (value), op2);
5321 /* Check for an addition with OP2 a constant integer and our first
5322 operand a PLUS of a virtual register and something else. In that
5323 case, we want to emit the sum of the virtual register and the
5324 constant first and then add the other value. This allows virtual
5325 register instantiation to simply modify the constant rather than
5326 creating another one around this addition. */
5327 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5328 && GET_CODE (XEXP (value, 0)) == PLUS
5329 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5330 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5331 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5333 rtx temp = expand_binop (GET_MODE (value), binoptab,
5334 XEXP (XEXP (value, 0), 0), op2,
5335 subtarget, 0, OPTAB_LIB_WIDEN);
5336 return expand_binop (GET_MODE (value), binoptab, temp,
5337 force_operand (XEXP (XEXP (value, 0), 1), 0),
5338 target, 0, OPTAB_LIB_WIDEN);
5341 tmp = force_operand (XEXP (value, 0), subtarget);
5342 return expand_binop (GET_MODE (value), binoptab, tmp,
5343 force_operand (op2, NULL_RTX),
5344 target, 0, OPTAB_LIB_WIDEN);
5345 /* We give UNSIGNEDP = 0 to expand_binop
5346 because the only operations we are expanding here are signed ones. */
5348 return value;
5351 /* Subroutine of expand_expr:
5352 save the non-copied parts (LIST) of an expr (LHS), and return a list
5353 which can restore these values to their previous values,
5354 should something modify their storage. */
5356 static tree
5357 save_noncopied_parts (lhs, list)
5358 tree lhs;
5359 tree list;
5361 tree tail;
5362 tree parts = 0;
5364 for (tail = list; tail; tail = TREE_CHAIN (tail))
5365 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5366 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5367 else
5369 tree part = TREE_VALUE (tail);
5370 tree part_type = TREE_TYPE (part);
5371 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
5372 rtx target = assign_temp (part_type, 0, 1, 1);
5373 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
5374 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
5375 parts = tree_cons (to_be_saved,
5376 build (RTL_EXPR, part_type, NULL_TREE,
5377 (tree) target),
5378 parts);
5379 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5381 return parts;
5384 /* Subroutine of expand_expr:
5385 record the non-copied parts (LIST) of an expr (LHS), and return a list
5386 which specifies the initial values of these parts. */
5388 static tree
5389 init_noncopied_parts (lhs, list)
5390 tree lhs;
5391 tree list;
5393 tree tail;
5394 tree parts = 0;
5396 for (tail = list; tail; tail = TREE_CHAIN (tail))
5397 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5398 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5399 else if (TREE_PURPOSE (tail))
5401 tree part = TREE_VALUE (tail);
5402 tree part_type = TREE_TYPE (part);
5403 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5404 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5406 return parts;
5409 /* Subroutine of expand_expr: return nonzero iff there is no way that
5410 EXP can reference X, which is being modified. TOP_P is nonzero if this
5411 call is going to be used to determine whether we need a temporary
5412 for EXP, as opposed to a recursive call to this function.
5414 It is always safe for this routine to return zero since it merely
5415 searches for optimization opportunities. */
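/* Editorial example (assumed C source, not in the original comment):
   when expanding `a = b + a', the rtx for `a' appears in the
   right-hand side, so safe_from_p returns 0 and the caller computes
   the sum into a temporary before storing it; for `a = b + c' it can
   return 1 and the target may be used directly.  */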
5418 safe_from_p (x, exp, top_p)
5419 rtx x;
5420 tree exp;
5421 int top_p;
5423 rtx exp_rtl = 0;
5424 int i, nops;
5425 static int save_expr_count;
5426 static int save_expr_size = 0;
5427 static tree *save_expr_rewritten;
5428 static tree save_expr_trees[256];
5430 if (x == 0
5431 /* If EXP has varying size, we MUST use a target since we currently
5432 have no way of allocating temporaries of variable size
5433 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5434 So we assume here that something at a higher level has prevented a
5435 clash. This is somewhat bogus, but the best we can do. Only
5436 do this when X is BLKmode and when we are at the top level. */
5437 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5438 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5439 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5440 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5441 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5442 != INTEGER_CST)
5443 && GET_MODE (x) == BLKmode))
5444 return 1;
5446 if (top_p && save_expr_size == 0)
5448 int rtn;
5450 save_expr_count = 0;
5451 save_expr_size = ARRAY_SIZE (save_expr_trees);
5452 save_expr_rewritten = &save_expr_trees[0];
5454 rtn = safe_from_p (x, exp, 1);
5456 for (i = 0; i < save_expr_count; ++i)
5458 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5459 abort ();
5460 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5463 save_expr_size = 0;
5465 return rtn;
5468 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5469 find the underlying pseudo. */
5470 if (GET_CODE (x) == SUBREG)
5472 x = SUBREG_REG (x);
5473 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5474 return 0;
5477 /* If X is a location in the outgoing argument area, it is always safe. */
5478 if (GET_CODE (x) == MEM
5479 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5480 || (GET_CODE (XEXP (x, 0)) == PLUS
5481 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5482 return 1;
5484 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5486 case 'd':
5487 exp_rtl = DECL_RTL (exp);
5488 break;
5490 case 'c':
5491 return 1;
5493 case 'x':
5494 if (TREE_CODE (exp) == TREE_LIST)
5495 return ((TREE_VALUE (exp) == 0
5496 || safe_from_p (x, TREE_VALUE (exp), 0))
5497 && (TREE_CHAIN (exp) == 0
5498 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5499 else if (TREE_CODE (exp) == ERROR_MARK)
5500 return 1; /* An already-visited SAVE_EXPR? */
5501 else
5502 return 0;
5504 case '1':
5505 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5507 case '2':
5508 case '<':
5509 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5510 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5512 case 'e':
5513 case 'r':
5514 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5515 the expression. If it is set, we conflict iff we are that rtx or
5516 both are in memory. Otherwise, we check all operands of the
5517 expression recursively. */
5519 switch (TREE_CODE (exp))
5521 case ADDR_EXPR:
5522 return (staticp (TREE_OPERAND (exp, 0))
5523 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5524 || TREE_STATIC (exp));
5526 case INDIRECT_REF:
5527 if (GET_CODE (x) == MEM)
5528 return 0;
5529 break;
5531 case CALL_EXPR:
5532 /* Assume that the call will clobber all hard registers and
5533 all of memory. */
5534 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5535 || GET_CODE (x) == MEM)
5536 return 0;
5537 break;
5539 case RTL_EXPR:
5540 /* If a sequence exists, we would have to scan every instruction
5541 in the sequence to see if it was safe. This is probably not
5542 worthwhile. */
5543 if (RTL_EXPR_SEQUENCE (exp))
5544 return 0;
5546 exp_rtl = RTL_EXPR_RTL (exp);
5547 break;
5549 case WITH_CLEANUP_EXPR:
5550 exp_rtl = RTL_EXPR_RTL (exp);
5551 break;
5553 case CLEANUP_POINT_EXPR:
5554 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5556 case SAVE_EXPR:
5557 exp_rtl = SAVE_EXPR_RTL (exp);
5558 if (exp_rtl)
5559 break;
5561 /* This SAVE_EXPR might appear many times in the top-level
5562 safe_from_p() expression, and if it has a complex
5563 subexpression, examining it multiple times could result
5564 in a combinatorial explosion. E.g. on an Alpha
5565 running at least 200MHz, a Fortran test case compiled with
5566 optimization took about 28 minutes to compile -- even though
5567 it was only a few lines long, and the complicated line causing
5568 so much time to be spent in the earlier version of safe_from_p()
5569 had only 293 or so unique nodes.
5571 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5572 where it is so we can turn it back into a SAVE_EXPR in the top-level safe_from_p()
5573 when we're done. */
5575 /* For now, don't bother re-sizing the array. */
5576 if (save_expr_count >= save_expr_size)
5577 return 0;
5578 save_expr_rewritten[save_expr_count++] = exp;
5580 nops = TREE_CODE_LENGTH (SAVE_EXPR);
5581 for (i = 0; i < nops; i++)
5583 tree operand = TREE_OPERAND (exp, i);
5584 if (operand == NULL_TREE)
5585 continue;
5586 TREE_SET_CODE (exp, ERROR_MARK);
5587 if (!safe_from_p (x, operand, 0))
5588 return 0;
5589 TREE_SET_CODE (exp, SAVE_EXPR);
5591 TREE_SET_CODE (exp, ERROR_MARK);
5592 return 1;
5594 case BIND_EXPR:
5595 /* The only operand we look at is operand 1. The rest aren't
5596 part of the expression. */
5597 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5599 case METHOD_CALL_EXPR:
5600 /* This takes an rtx argument, but shouldn't appear here. */
5601 abort ();
5603 default:
5604 break;
5607 /* If we have an rtx, we do not need to scan our operands. */
5608 if (exp_rtl)
5609 break;
5611 nops = first_rtl_op (TREE_CODE (exp));
5612 for (i = 0; i < nops; i++)
5613 if (TREE_OPERAND (exp, i) != 0
5614 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5615 return 0;
5617 /* If this is a language-specific tree code, it may require
5618 special handling. */
5619 if (TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE
5620 && lang_safe_from_p
5621 && !(*lang_safe_from_p) (x, exp))
5622 return 0;
5625 /* If we have an rtl, find any enclosed object. Then see if we conflict
5626 with it. */
5627 if (exp_rtl)
5629 if (GET_CODE (exp_rtl) == SUBREG)
5631 exp_rtl = SUBREG_REG (exp_rtl);
5632 if (GET_CODE (exp_rtl) == REG
5633 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5634 return 0;
5637 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5638 are memory and EXP is not readonly. */
5639 return ! (rtx_equal_p (x, exp_rtl)
5640 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5641 && ! TREE_READONLY (exp)));
5644 /* If we reach here, it is safe. */
5645 return 1;
5648 /* Subroutine of expand_expr: return nonzero iff EXP is an
5649 expression whose type is statically determinable. */
5651 static int
5652 fixed_type_p (exp)
5653 tree exp;
5655 if (TREE_CODE (exp) == PARM_DECL
5656 || TREE_CODE (exp) == VAR_DECL
5657 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5658 || TREE_CODE (exp) == COMPONENT_REF
5659 || TREE_CODE (exp) == ARRAY_REF)
5660 return 1;
5661 return 0;
5664 /* Subroutine of expand_expr: return rtx if EXP is a
5665 variable or parameter; else return 0. */
5667 static rtx
5668 var_rtx (exp)
5669 tree exp;
5671 STRIP_NOPS (exp);
5672 switch (TREE_CODE (exp))
5674 case PARM_DECL:
5675 case VAR_DECL:
5676 return DECL_RTL (exp);
5677 default:
5678 return 0;
5682 #ifdef MAX_INTEGER_COMPUTATION_MODE
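/* Verify that EXP involves no integer operation in a mode wider than
   MAX_INTEGER_COMPUTATION_MODE; report a fatal error if it does.  */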
5683 void
5684 check_max_integer_computation_mode (exp)
5685 tree exp;
5687 enum tree_code code;
5688 enum machine_mode mode;
5690 /* Strip any NOPs that don't change the mode. */
5691 STRIP_NOPS (exp);
5692 code = TREE_CODE (exp);
5694 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5695 if (code == NOP_EXPR
5696 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5697 return;
5699 /* First check the type of the overall operation. We need only look at
5700 unary, binary and relational operations. */
5701 if (TREE_CODE_CLASS (code) == '1'
5702 || TREE_CODE_CLASS (code) == '2'
5703 || TREE_CODE_CLASS (code) == '<')
5705 mode = TYPE_MODE (TREE_TYPE (exp));
5706 if (GET_MODE_CLASS (mode) == MODE_INT
5707 && mode > MAX_INTEGER_COMPUTATION_MODE)
5708 fatal ("unsupported wide integer operation");
5711 /* Check operand of a unary op. */
5712 if (TREE_CODE_CLASS (code) == '1')
5714 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5715 if (GET_MODE_CLASS (mode) == MODE_INT
5716 && mode > MAX_INTEGER_COMPUTATION_MODE)
5717 fatal ("unsupported wide integer operation");
5720 /* Check operands of a binary/comparison op. */
5721 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5723 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5724 if (GET_MODE_CLASS (mode) == MODE_INT
5725 && mode > MAX_INTEGER_COMPUTATION_MODE)
5726 fatal ("unsupported wide integer operation");
5728 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5729 if (GET_MODE_CLASS (mode) == MODE_INT
5730 && mode > MAX_INTEGER_COMPUTATION_MODE)
5731 fatal ("unsupported wide integer operation");
5734 #endif
5736 /* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5737 has any readonly fields. If any of the fields have types that
5738 contain readonly fields, return true as well. */
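/* For instance (an illustrative example, not a tree built in this file),
   given

       struct s { const int x; int y; };

   the RECORD_TYPE for `struct s' has a read-only FIELD_DECL for `x',
   so this function returns 1 for it.  */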
5740 static int
5741 readonly_fields_p (type)
5742 tree type;
5744 tree field;
5746 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
5747 if (TREE_CODE (field) == FIELD_DECL
5748 && (TREE_READONLY (field)
5749 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5750 && readonly_fields_p (TREE_TYPE (field)))))
5751 return 1;
5753 return 0;
5756 /* expand_expr: generate code for computing expression EXP.
5757 An rtx for the computed value is returned. The value is never null.
5758 In the case of a void EXP, const0_rtx is returned.
5760 The value may be stored in TARGET if TARGET is nonzero.
5761 TARGET is just a suggestion; callers must assume that
5762 the rtx returned may not be the same as TARGET.
5764 If TARGET is CONST0_RTX, it means that the value will be ignored.
5766 If TMODE is not VOIDmode, it suggests generating the
5767 result in mode TMODE. But this is done only when convenient.
5768 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5769 TMODE is just a suggestion; callers must assume that
5770 the rtx returned may not have mode TMODE.
5772 Note that TARGET may have neither TMODE nor MODE. In that case, it
5773 probably will not be used.
5775 If MODIFIER is EXPAND_SUM then when EXP is an addition
5776 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5777 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5778 products as above, or REG or MEM, or constant.
5779 Ordinarily in such cases we would output mul or add instructions
5780 and then return a pseudo reg containing the sum.
5782 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5783 it also marks a label as absolutely required (it can't be dead).
5784 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5785 This is used for outputting expressions used in initializers.
5787 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5788 with a constant address even if that address is not normally legitimate.
5789 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
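/* For example (a sketch, not a call taken from this file), a caller that
   just wants the value of EXP in whatever mode is convenient can write

       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   and pass EXPAND_SUM as the modifier instead when it wants address
   arithmetic kept in the symbolic form described above.  */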
5791 rtx
5792 expand_expr (exp, target, tmode, modifier)
5793 register tree exp;
5794 rtx target;
5795 enum machine_mode tmode;
5796 enum expand_modifier modifier;
5798 register rtx op0, op1, temp;
5799 tree type = TREE_TYPE (exp);
5800 int unsignedp = TREE_UNSIGNED (type);
5801 register enum machine_mode mode;
5802 register enum tree_code code = TREE_CODE (exp);
5803 optab this_optab;
5804 rtx subtarget, original_target;
5805 int ignore;
5806 tree context;
5807 /* Used by check-memory-usage to make modifier read only. */
5808 enum expand_modifier ro_modifier;
5810 /* Handle ERROR_MARK before anybody tries to access its type. */
5811 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5813 op0 = CONST0_RTX (tmode);
5814 if (op0 != 0)
5815 return op0;
5816 return const0_rtx;
5819 mode = TYPE_MODE (type);
5820 /* Use subtarget as the target for operand 0 of a binary operation. */
5821 subtarget = get_subtarget (target);
5822 original_target = target;
5823 ignore = (target == const0_rtx
5824 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5825 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5826 || code == COND_EXPR)
5827 && TREE_CODE (type) == VOID_TYPE));
5829 /* Make a read-only version of the modifier. */
5830 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5831 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5832 ro_modifier = modifier;
5833 else
5834 ro_modifier = EXPAND_NORMAL;
5836 /* If we are going to ignore this result, we need only do something
5837 if there is a side-effect somewhere in the expression. If there
5838 is, short-circuit the most common cases here. Note that we must
5839 not call expand_expr with anything but const0_rtx in case this
5840 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5842 if (ignore)
5844 if (! TREE_SIDE_EFFECTS (exp))
5845 return const0_rtx;
5847 /* Ensure we reference a volatile object even if value is ignored, but
5848 don't do this if all we are doing is taking its address. */
5849 if (TREE_THIS_VOLATILE (exp)
5850 && TREE_CODE (exp) != FUNCTION_DECL
5851 && mode != VOIDmode && mode != BLKmode
5852 && modifier != EXPAND_CONST_ADDRESS)
5854 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5855 if (GET_CODE (temp) == MEM)
5856 temp = copy_to_reg (temp);
5857 return const0_rtx;
5860 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5861 || code == INDIRECT_REF || code == BUFFER_REF)
5862 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5863 VOIDmode, ro_modifier);
5864 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5865 || code == ARRAY_REF)
5867 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5868 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5869 return const0_rtx;
5871 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5872 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5873 /* If the second operand has no side effects, just evaluate
5874 the first. */
5875 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
5876 VOIDmode, ro_modifier);
5877 else if (code == BIT_FIELD_REF)
5879 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5880 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5881 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5882 return const0_rtx;
5885 target = 0;
5888 #ifdef MAX_INTEGER_COMPUTATION_MODE
5889 /* Only check stuff here if the mode we want is different from the mode
5890 of the expression; if it's the same, check_max_integer_computation_mode
5891 will handle it. Do we really need to check this stuff at all? */
5893 if (target
5894 && GET_MODE (target) != mode
5895 && TREE_CODE (exp) != INTEGER_CST
5896 && TREE_CODE (exp) != PARM_DECL
5897 && TREE_CODE (exp) != ARRAY_REF
5898 && TREE_CODE (exp) != COMPONENT_REF
5899 && TREE_CODE (exp) != BIT_FIELD_REF
5900 && TREE_CODE (exp) != INDIRECT_REF
5901 && TREE_CODE (exp) != CALL_EXPR
5902 && TREE_CODE (exp) != VAR_DECL
5903 && TREE_CODE (exp) != RTL_EXPR)
5905 enum machine_mode mode = GET_MODE (target);
5907 if (GET_MODE_CLASS (mode) == MODE_INT
5908 && mode > MAX_INTEGER_COMPUTATION_MODE)
5909 fatal ("unsupported wide integer operation");
5912 if (tmode != mode
5913 && TREE_CODE (exp) != INTEGER_CST
5914 && TREE_CODE (exp) != PARM_DECL
5915 && TREE_CODE (exp) != ARRAY_REF
5916 && TREE_CODE (exp) != COMPONENT_REF
5917 && TREE_CODE (exp) != BIT_FIELD_REF
5918 && TREE_CODE (exp) != INDIRECT_REF
5919 && TREE_CODE (exp) != VAR_DECL
5920 && TREE_CODE (exp) != CALL_EXPR
5921 && TREE_CODE (exp) != RTL_EXPR
5922 && GET_MODE_CLASS (tmode) == MODE_INT
5923 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5924 fatal ("unsupported wide integer operation");
5926 check_max_integer_computation_mode (exp);
5927 #endif
5929 /* If we will do cse, generate all results into pseudo registers
5930 since 1) that allows cse to find more things
5931 and 2) otherwise cse could produce an insn the machine
5932 cannot support. */
5934 if (! cse_not_expected && mode != BLKmode && target
5935 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5936 target = subtarget;
5938 switch (code)
5940 case LABEL_DECL:
5942 tree function = decl_function_context (exp);
5943 /* Handle using a label in a containing function. */
5944 if (function != current_function_decl
5945 && function != inline_function_decl && function != 0)
5947 struct function *p = find_function_data (function);
5948 p->expr->x_forced_labels
5949 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5950 p->expr->x_forced_labels);
5952 else
5954 if (modifier == EXPAND_INITIALIZER)
5955 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5956 label_rtx (exp),
5957 forced_labels);
5960 temp = gen_rtx_MEM (FUNCTION_MODE,
5961 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5962 if (function != current_function_decl
5963 && function != inline_function_decl && function != 0)
5964 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5965 return temp;
5968 case PARM_DECL:
5969 if (DECL_RTL (exp) == 0)
5971 error_with_decl (exp, "prior parameter's size depends on `%s'");
5972 return CONST0_RTX (mode);
5975 /* ... fall through ... */
5977 case VAR_DECL:
5978 /* If a static var's type was incomplete when the decl was written,
5979 but the type is complete now, lay out the decl now. */
5980 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5981 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5983 layout_decl (exp, 0);
5984 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5987 /* Although static-storage variables start off initialized, according to
5988 ANSI C, a memcpy could overwrite them with uninitialized values. So
5989 we check them too. This also lets us check for read-only variables
5990 accessed via a non-const declaration, in case it won't be detected
5991 any other way (e.g., in an embedded system or OS kernel without
5992 memory protection).
5994 Aggregates are not checked here; they're handled elsewhere. */
5995 if (cfun && current_function_check_memory_usage
5996 && code == VAR_DECL
5997 && GET_CODE (DECL_RTL (exp)) == MEM
5998 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6000 enum memory_use_mode memory_usage;
6001 memory_usage = get_memory_usage_from_modifier (modifier);
6003 in_check_memory_usage = 1;
6004 if (memory_usage != MEMORY_USE_DONT)
6005 emit_library_call (chkr_check_addr_libfunc,
6006 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6007 XEXP (DECL_RTL (exp), 0), Pmode,
6008 GEN_INT (int_size_in_bytes (type)),
6009 TYPE_MODE (sizetype),
6010 GEN_INT (memory_usage),
6011 TYPE_MODE (integer_type_node));
6012 in_check_memory_usage = 0;
6015 /* ... fall through ... */
6017 case FUNCTION_DECL:
6018 case RESULT_DECL:
6019 if (DECL_RTL (exp) == 0)
6020 abort ();
6022 /* Ensure the variable is marked as used even if it doesn't go through
6023 a parser. If it hasn't been used yet, write out an external
6024 definition. */
6025 if (! TREE_USED (exp))
6027 assemble_external (exp);
6028 TREE_USED (exp) = 1;
6031 /* Show we haven't gotten RTL for this yet. */
6032 temp = 0;
6034 /* Handle variables inherited from containing functions. */
6035 context = decl_function_context (exp);
6037 /* We treat inline_function_decl as an alias for the current function
6038 because that is the inline function whose vars, types, etc.
6039 are being merged into the current function.
6040 See expand_inline_function. */
6042 if (context != 0 && context != current_function_decl
6043 && context != inline_function_decl
6044 /* If var is static, we don't need a static chain to access it. */
6045 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6046 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6048 rtx addr;
6050 /* Mark as non-local and addressable. */
6051 DECL_NONLOCAL (exp) = 1;
6052 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6053 abort ();
6054 mark_addressable (exp);
6055 if (GET_CODE (DECL_RTL (exp)) != MEM)
6056 abort ();
6057 addr = XEXP (DECL_RTL (exp), 0);
6058 if (GET_CODE (addr) == MEM)
6059 addr = change_address (addr, Pmode,
6060 fix_lexical_addr (XEXP (addr, 0), exp));
6061 else
6062 addr = fix_lexical_addr (addr, exp);
6064 temp = change_address (DECL_RTL (exp), mode, addr);
6067 /* This is the case of an array whose size is to be determined
6068 from its initializer, while the initializer is still being parsed.
6069 See expand_decl. */
6071 else if (GET_CODE (DECL_RTL (exp)) == MEM
6072 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6073 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
6074 XEXP (DECL_RTL (exp), 0));
6076 /* If DECL_RTL is memory, we are in the normal case; if either
6077 the address is not valid, or it is not a register and -fforce-addr
6078 is specified, get the address into a register. */
6080 else if (GET_CODE (DECL_RTL (exp)) == MEM
6081 && modifier != EXPAND_CONST_ADDRESS
6082 && modifier != EXPAND_SUM
6083 && modifier != EXPAND_INITIALIZER
6084 && (! memory_address_p (DECL_MODE (exp),
6085 XEXP (DECL_RTL (exp), 0))
6086 || (flag_force_addr
6087 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6088 temp = change_address (DECL_RTL (exp), VOIDmode,
6089 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6091 /* If we got something, return it. But first, set the alignment
6092 if the address is a register. */
6093 if (temp != 0)
6095 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6096 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6098 return temp;
6101 /* If the mode of DECL_RTL does not match that of the decl, it
6102 must be a promoted value. We return a SUBREG of the wanted mode,
6103 but mark it so that we know that it was already extended. */
6105 if (GET_CODE (DECL_RTL (exp)) == REG
6106 && GET_MODE (DECL_RTL (exp)) != mode)
6108 /* Get the signedness used for this variable. Ensure we get the
6109 same mode we got when the variable was declared. */
6110 if (GET_MODE (DECL_RTL (exp))
6111 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6112 abort ();
6114 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
6115 SUBREG_PROMOTED_VAR_P (temp) = 1;
6116 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6117 return temp;
6120 return DECL_RTL (exp);
6122 case INTEGER_CST:
6123 return immed_double_const (TREE_INT_CST_LOW (exp),
6124 TREE_INT_CST_HIGH (exp), mode);
6126 case CONST_DECL:
6127 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6128 EXPAND_MEMORY_USE_BAD);
6130 case REAL_CST:
6131 /* If optimized, generate immediate CONST_DOUBLE
6132 which will be turned into memory by reload if necessary.
6134 We used to force a register so that loop.c could see it. But
6135 this does not allow gen_* patterns to perform optimizations with
6136 the constants. It also produces two insns in cases like "x = 1.0;".
6137 On most machines, floating-point constants are not permitted in
6138 many insns, so we'd end up copying it to a register in any case.
6140 Now, we do the copying in expand_binop, if appropriate. */
6141 return immed_real_const (exp);
6143 case COMPLEX_CST:
6144 case STRING_CST:
6145 if (! TREE_CST_RTL (exp))
6146 output_constant_def (exp);
6148 /* TREE_CST_RTL probably contains a constant address.
6149 On RISC machines where a constant address isn't valid,
6150 make some insns to get that address into a register. */
6151 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6152 && modifier != EXPAND_CONST_ADDRESS
6153 && modifier != EXPAND_INITIALIZER
6154 && modifier != EXPAND_SUM
6155 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6156 || (flag_force_addr
6157 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6158 return change_address (TREE_CST_RTL (exp), VOIDmode,
6159 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6160 return TREE_CST_RTL (exp);
6162 case EXPR_WITH_FILE_LOCATION:
6164 rtx to_return;
6165 const char *saved_input_filename = input_filename;
6166 int saved_lineno = lineno;
6167 input_filename = EXPR_WFL_FILENAME (exp);
6168 lineno = EXPR_WFL_LINENO (exp);
6169 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6170 emit_line_note (input_filename, lineno);
6171 /* Possibly avoid switching back and forth here. */
6172 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6173 input_filename = saved_input_filename;
6174 lineno = saved_lineno;
6175 return to_return;
6178 case SAVE_EXPR:
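/* A SAVE_EXPR is expanded only once: the first expansion computes the
   value and records it in SAVE_EXPR_RTL; later expansions simply reuse
   that rtl.  */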
6179 context = decl_function_context (exp);
6181 /* If this SAVE_EXPR was at global context, assume we are an
6182 initialization function and move it into our context. */
6183 if (context == 0)
6184 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6186 /* We treat inline_function_decl as an alias for the current function
6187 because that is the inline function whose vars, types, etc.
6188 are being merged into the current function.
6189 See expand_inline_function. */
6190 if (context == current_function_decl || context == inline_function_decl)
6191 context = 0;
6193 /* If this is non-local, handle it. */
6194 if (context)
6196 /* The following call just exists to abort if the context is
6197 not of a containing function. */
6198 find_function_data (context);
6200 temp = SAVE_EXPR_RTL (exp);
6201 if (temp && GET_CODE (temp) == REG)
6203 put_var_into_stack (exp);
6204 temp = SAVE_EXPR_RTL (exp);
6206 if (temp == 0 || GET_CODE (temp) != MEM)
6207 abort ();
6208 return change_address (temp, mode,
6209 fix_lexical_addr (XEXP (temp, 0), exp));
6211 if (SAVE_EXPR_RTL (exp) == 0)
6213 if (mode == VOIDmode)
6214 temp = const0_rtx;
6215 else
6217 temp = assign_temp (type, 3, 0, 0);
6218 if (GET_CODE (temp) == MEM)
6219 RTX_UNCHANGING_P (temp) = 1;
6222 SAVE_EXPR_RTL (exp) = temp;
6223 if (!optimize && GET_CODE (temp) == REG)
6224 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6225 save_expr_regs);
6227 /* If the mode of TEMP does not match that of the expression, it
6228 must be a promoted value. We pass store_expr a SUBREG of the
6229 wanted mode but mark it so that we know that it was already
6230 extended. Note that `unsignedp' was modified above in
6231 this case. */
6233 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6235 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6236 SUBREG_PROMOTED_VAR_P (temp) = 1;
6237 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6240 if (temp == const0_rtx)
6241 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6242 EXPAND_MEMORY_USE_BAD);
6243 else
6244 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6246 TREE_USED (exp) = 1;
6249 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6250 must be a promoted value. We return a SUBREG of the wanted mode,
6251 but mark it so that we know that it was already extended. */
6253 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6254 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6256 /* Compute the signedness and make the proper SUBREG. */
6257 promote_mode (type, mode, &unsignedp, 0);
6258 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
6259 SUBREG_PROMOTED_VAR_P (temp) = 1;
6260 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6261 return temp;
6264 return SAVE_EXPR_RTL (exp);
6266 case UNSAVE_EXPR:
6268 rtx temp;
6269 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6270 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6271 return temp;
6274 case PLACEHOLDER_EXPR:
6276 tree placeholder_expr;
6278 /* If there is an object on the head of the placeholder list,
6279 see if any object in it is of type TYPE or a pointer to it. For
6280 further information, see tree.def. */
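/* (A PLACEHOLDER_EXPR stands in for an object whose type contains this
   expression; the object itself is supplied by an enclosing
   WITH_RECORD_EXPR, which pushes it onto placeholder_list -- see the
   WITH_RECORD_EXPR case below.)  */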
6281 for (placeholder_expr = placeholder_list;
6282 placeholder_expr != 0;
6283 placeholder_expr = TREE_CHAIN (placeholder_expr))
6285 tree need_type = TYPE_MAIN_VARIANT (type);
6286 tree object = 0;
6287 tree old_list = placeholder_list;
6288 tree elt;
6290 /* Find the outermost reference that is of the type we want.
6291 If none, see if any object has a type that is a pointer to
6292 the type we want. */
6293 for (elt = TREE_PURPOSE (placeholder_expr);
6294 elt != 0 && object == 0;
6295 elt
6296 = ((TREE_CODE (elt) == COMPOUND_EXPR
6297 || TREE_CODE (elt) == COND_EXPR)
6298 ? TREE_OPERAND (elt, 1)
6299 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6300 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6301 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6302 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6303 ? TREE_OPERAND (elt, 0) : 0))
6304 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6305 object = elt;
6307 for (elt = TREE_PURPOSE (placeholder_expr);
6308 elt != 0 && object == 0;
6309 elt
6310 = ((TREE_CODE (elt) == COMPOUND_EXPR
6311 || TREE_CODE (elt) == COND_EXPR)
6312 ? TREE_OPERAND (elt, 1)
6313 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6314 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6315 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6316 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6317 ? TREE_OPERAND (elt, 0) : 0))
6318 if (POINTER_TYPE_P (TREE_TYPE (elt))
6319 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6320 == need_type))
6321 object = build1 (INDIRECT_REF, need_type, elt);
6323 if (object != 0)
6325 /* Expand this object skipping the list entries before
6326 it was found in case it is also a PLACEHOLDER_EXPR.
6327 In that case, we want to translate it using subsequent
6328 entries. */
6329 placeholder_list = TREE_CHAIN (placeholder_expr);
6330 temp = expand_expr (object, original_target, tmode,
6331 ro_modifier);
6332 placeholder_list = old_list;
6333 return temp;
6338 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6339 abort ();
6341 case WITH_RECORD_EXPR:
6342 /* Put the object on the placeholder list, expand our first operand,
6343 and pop the list. */
6344 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6345 placeholder_list);
6346 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6347 tmode, ro_modifier);
6348 placeholder_list = TREE_CHAIN (placeholder_list);
6349 return target;
6351 case GOTO_EXPR:
6352 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6353 expand_goto (TREE_OPERAND (exp, 0));
6354 else
6355 expand_computed_goto (TREE_OPERAND (exp, 0));
6356 return const0_rtx;
6358 case EXIT_EXPR:
6359 expand_exit_loop_if_false (NULL_PTR,
6360 invert_truthvalue (TREE_OPERAND (exp, 0)));
6361 return const0_rtx;
6363 case LABELED_BLOCK_EXPR:
6364 if (LABELED_BLOCK_BODY (exp))
6365 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6366 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6367 return const0_rtx;
6369 case EXIT_BLOCK_EXPR:
6370 if (EXIT_BLOCK_RETURN (exp))
6371 sorry ("returned value in block_exit_expr");
6372 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6373 return const0_rtx;
6375 case LOOP_EXPR:
6376 push_temp_slots ();
6377 expand_start_loop (1);
6378 expand_expr_stmt (TREE_OPERAND (exp, 0));
6379 expand_end_loop ();
6380 pop_temp_slots ();
6382 return const0_rtx;
6384 case BIND_EXPR:
6386 tree vars = TREE_OPERAND (exp, 0);
6387 int vars_need_expansion = 0;
6389 /* Need to open a binding contour here because
6390 if there are any cleanups they must be contained here. */
6391 expand_start_bindings (2);
6393 /* Mark the corresponding BLOCK for output in its proper place. */
6394 if (TREE_OPERAND (exp, 2) != 0
6395 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6396 insert_block (TREE_OPERAND (exp, 2));
6398 /* If VARS have not yet been expanded, expand them now. */
6399 while (vars)
6401 if (DECL_RTL (vars) == 0)
6403 vars_need_expansion = 1;
6404 expand_decl (vars);
6406 expand_decl_init (vars);
6407 vars = TREE_CHAIN (vars);
6410 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6412 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6414 return temp;
6417 case RTL_EXPR:
6418 if (RTL_EXPR_SEQUENCE (exp))
6420 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6421 abort ();
6422 emit_insns (RTL_EXPR_SEQUENCE (exp));
6423 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6425 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6426 free_temps_for_rtl_expr (exp);
6427 return RTL_EXPR_RTL (exp);
6429 case CONSTRUCTOR:
6430 /* If we don't need the result, just ensure we evaluate any
6431 subexpressions. */
6432 if (ignore)
6434 tree elt;
6435 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6436 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6437 EXPAND_MEMORY_USE_BAD);
6438 return const0_rtx;
6441 /* All elts simple constants => refer to a constant in memory. But
6442 if this is a non-BLKmode mode, let it store a field at a time
6443 since that should make a CONST_INT or CONST_DOUBLE when we
6444 fold. Likewise, if we have a target we can use, it is best to
6445 store directly into the target unless the type is large enough
6446 that memcpy will be used. If we are making an initializer and
6447 all operands are constant, put it in memory as well. */
6448 else if ((TREE_STATIC (exp)
6449 && ((mode == BLKmode
6450 && ! (target != 0 && safe_from_p (target, exp, 1)))
6451 || TREE_ADDRESSABLE (exp)
6452 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6453 && (! MOVE_BY_PIECES_P
6454 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6455 TYPE_ALIGN (type)))
6456 && ! mostly_zeros_p (exp))))
6457 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6459 rtx constructor = output_constant_def (exp);
6461 if (modifier != EXPAND_CONST_ADDRESS
6462 && modifier != EXPAND_INITIALIZER
6463 && modifier != EXPAND_SUM
6464 && (! memory_address_p (GET_MODE (constructor),
6465 XEXP (constructor, 0))
6466 || (flag_force_addr
6467 && GET_CODE (XEXP (constructor, 0)) != REG)))
6468 constructor = change_address (constructor, VOIDmode,
6469 XEXP (constructor, 0));
6470 return constructor;
6473 else
6475 /* Handle calls that pass values in multiple non-contiguous
6476 locations. The Irix 6 ABI has examples of this. */
6477 if (target == 0 || ! safe_from_p (target, exp, 1)
6478 || GET_CODE (target) == PARALLEL)
6480 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6481 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6482 else
6483 target = assign_temp (type, 0, 1, 1);
6486 if (TREE_READONLY (exp))
6488 if (GET_CODE (target) == MEM)
6489 target = copy_rtx (target);
6491 RTX_UNCHANGING_P (target) = 1;
6494 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6495 int_size_in_bytes (TREE_TYPE (exp)));
6496 return target;
6499 case INDIRECT_REF:
6501 tree exp1 = TREE_OPERAND (exp, 0);
6502 tree index;
6503 tree string = string_constant (exp1, &index);
6505 /* Try to optimize reads from const strings. */
6506 if (string
6507 && TREE_CODE (string) == STRING_CST
6508 && TREE_CODE (index) == INTEGER_CST
6509 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6510 && GET_MODE_CLASS (mode) == MODE_INT
6511 && GET_MODE_SIZE (mode) == 1
6512 && modifier != EXPAND_MEMORY_USE_WO)
6513 return
6514 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6516 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6517 op0 = memory_address (mode, op0);
6519 if (cfun && current_function_check_memory_usage
6520 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6522 enum memory_use_mode memory_usage;
6523 memory_usage = get_memory_usage_from_modifier (modifier);
6525 if (memory_usage != MEMORY_USE_DONT)
6527 in_check_memory_usage = 1;
6528 emit_library_call (chkr_check_addr_libfunc,
6529 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6530 Pmode, GEN_INT (int_size_in_bytes (type)),
6531 TYPE_MODE (sizetype),
6532 GEN_INT (memory_usage),
6533 TYPE_MODE (integer_type_node));
6534 in_check_memory_usage = 0;
6538 temp = gen_rtx_MEM (mode, op0);
6539 set_mem_attributes (temp, exp, 0);
6541 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6542 here, because, in C and C++, the fact that a location is accessed
6543 through a pointer to const does not mean that the value there can
6544 never change. Languages where it can never change should
6545 also set TREE_STATIC. */
6546 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6548 /* If we are writing to this object and its type is a record with
6549 readonly fields, we must mark it as readonly so it will
6550 conflict with readonly references to those fields. */
6551 if (modifier == EXPAND_MEMORY_USE_WO
6552 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6553 RTX_UNCHANGING_P (temp) = 1;
6555 return temp;
6558 case ARRAY_REF:
6559 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6560 abort ();
6563 tree array = TREE_OPERAND (exp, 0);
6564 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6565 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6566 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6567 HOST_WIDE_INT i;
6569 /* Optimize the special-case of a zero lower bound.
6571 We convert the low_bound to sizetype to avoid some problems
6572 with constant folding. (E.g. suppose the lower bound is 1,
6573 and its mode is QI. Without the conversion, (ARRAY
6574 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6575 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6577 if (! integer_zerop (low_bound))
6578 index = size_diffop (index, convert (sizetype, low_bound));
6580 /* Fold an expression like: "foo"[2].
6581 This is not done in fold so it won't happen inside &.
6582 Don't fold if this is for wide characters since it's too
6583 difficult to do correctly and this is a very rare case. */
6585 if (TREE_CODE (array) == STRING_CST
6586 && TREE_CODE (index) == INTEGER_CST
6587 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6588 && GET_MODE_CLASS (mode) == MODE_INT
6589 && GET_MODE_SIZE (mode) == 1)
6590 return
6591 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6593 /* If this is a constant index into a constant array,
6594 just get the value from the array. Handle both the cases when
6595 we have an explicit constructor and when our operand is a variable
6596 that was declared const. */
6598 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6599 && TREE_CODE (index) == INTEGER_CST
6600 && 0 > compare_tree_int (index,
6601 list_length (CONSTRUCTOR_ELTS
6602 (TREE_OPERAND (exp, 0)))))
6604 tree elem;
6606 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6607 i = TREE_INT_CST_LOW (index);
6608 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6611 if (elem)
6612 return expand_expr (fold (TREE_VALUE (elem)), target,
6613 tmode, ro_modifier);
6616 else if (optimize >= 1
6617 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6618 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6619 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6621 if (TREE_CODE (index) == INTEGER_CST)
6623 tree init = DECL_INITIAL (array);
6625 if (TREE_CODE (init) == CONSTRUCTOR)
6627 tree elem;
6629 for (elem = CONSTRUCTOR_ELTS (init);
6630 (elem
6631 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6632 elem = TREE_CHAIN (elem))
6635 if (elem)
6636 return expand_expr (fold (TREE_VALUE (elem)), target,
6637 tmode, ro_modifier);
6639 else if (TREE_CODE (init) == STRING_CST
6640 && 0 > compare_tree_int (index,
6641 TREE_STRING_LENGTH (init)))
6643 tree type = TREE_TYPE (TREE_TYPE (init));
6644 enum machine_mode mode = TYPE_MODE (type);
6646 if (GET_MODE_CLASS (mode) == MODE_INT
6647 && GET_MODE_SIZE (mode) == 1)
6648 return (GEN_INT
6649 (TREE_STRING_POINTER
6650 (init)[TREE_INT_CST_LOW (index)]));
6655 /* Fall through. */
6657 case COMPONENT_REF:
6658 case BIT_FIELD_REF:
6659 /* If the operand is a CONSTRUCTOR, we can just extract the
6660 appropriate field if it is present. Don't do this if we have
6661 already written the data since we want to refer to that copy
6662 and varasm.c assumes that's what we'll do. */
6663 if (code != ARRAY_REF
6664 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6665 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6667 tree elt;
6669 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6670 elt = TREE_CHAIN (elt))
6671 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6672 /* We can normally use the value of the field in the
6673 CONSTRUCTOR. However, if this is a bitfield in
6674 an integral mode that we can fit in a HOST_WIDE_INT,
6675 we must mask only the number of bits in the bitfield,
6676 since this is done implicitly by the constructor. If
6677 the bitfield does not meet either of those conditions,
6678 we can't do this optimization. */
6679 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6680 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6681 == MODE_INT)
6682 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6683 <= HOST_BITS_PER_WIDE_INT))))
6685 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6686 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6688 HOST_WIDE_INT bitsize
6689 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6691 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6693 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6694 op0 = expand_and (op0, op1, target);
6696 else
6698 enum machine_mode imode
6699 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6700 tree count
6701 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6702 0);
6704 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6705 target, 0);
6706 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6707 target, 0);
6711 return op0;
6716 enum machine_mode mode1;
6717 HOST_WIDE_INT bitsize, bitpos;
6718 tree offset;
6719 int volatilep = 0;
6720 unsigned int alignment;
6721 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6722 &mode1, &unsignedp, &volatilep,
6723 &alignment);
6725 /* If we got back the original object, something is wrong. Perhaps
6726 we are evaluating an expression too early. In any event, don't
6727 infinitely recurse. */
6728 if (tem == exp)
6729 abort ();
6731 /* If TEM's type is a union of variable size, pass TARGET to the inner
6732 computation, since it will need a temporary and TARGET is known
6733 to be usable as that temporary. This occurs in unchecked conversion in Ada. */
6735 op0 = expand_expr (tem,
6736 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6737 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6738 != INTEGER_CST)
6739 ? target : NULL_RTX),
6740 VOIDmode,
6741 (modifier == EXPAND_INITIALIZER
6742 || modifier == EXPAND_CONST_ADDRESS)
6743 ? modifier : EXPAND_NORMAL);
6745 /* If this is a constant, put it into a register if it is a
6746 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6747 if (CONSTANT_P (op0))
6749 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6750 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6751 && offset == 0)
6752 op0 = force_reg (mode, op0);
6753 else
6754 op0 = validize_mem (force_const_mem (mode, op0));
6757 if (offset != 0)
6759 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6761 /* If this object is in a register, put it into memory.
6762 This case can't occur in C, but can in Ada if we have
6763 unchecked conversion of an expression from a scalar type to
6764 an array or record type. */
6765 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6766 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6768 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6770 mark_temp_addr_taken (memloc);
6771 emit_move_insn (memloc, op0);
6772 op0 = memloc;
6775 if (GET_CODE (op0) != MEM)
6776 abort ();
6778 if (GET_MODE (offset_rtx) != ptr_mode)
6780 #ifdef POINTERS_EXTEND_UNSIGNED
6781 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6782 #else
6783 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6784 #endif
6787 /* A constant address in OP0 can have VOIDmode; we must not try
6788 to call force_reg for that case. Avoid that case. */
6789 if (GET_CODE (op0) == MEM
6790 && GET_MODE (op0) == BLKmode
6791 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6792 && bitsize != 0
6793 && (bitpos % bitsize) == 0
6794 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6795 && alignment == GET_MODE_ALIGNMENT (mode1))
6797 rtx temp = change_address (op0, mode1,
6798 plus_constant (XEXP (op0, 0),
6799 (bitpos /
6800 BITS_PER_UNIT)));
6801 if (GET_CODE (XEXP (temp, 0)) == REG)
6802 op0 = temp;
6803 else
6804 op0 = change_address (op0, mode1,
6805 force_reg (GET_MODE (XEXP (temp, 0)),
6806 XEXP (temp, 0)));
6807 bitpos = 0;
6810 op0 = change_address (op0, VOIDmode,
6811 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6812 force_reg (ptr_mode,
6813 offset_rtx)));
6816 /* Don't forget about volatility even if this is a bitfield. */
6817 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6819 op0 = copy_rtx (op0);
6820 MEM_VOLATILE_P (op0) = 1;
6823 /* Check the access. */
6824 if (cfun != 0 && current_function_check_memory_usage
6825 && GET_CODE (op0) == MEM)
6827 enum memory_use_mode memory_usage;
6828 memory_usage = get_memory_usage_from_modifier (modifier);
6830 if (memory_usage != MEMORY_USE_DONT)
6832 rtx to;
6833 int size;
6835 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6836 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6838 /* Check the access right of the pointer. */
6839 in_check_memory_usage = 1;
6840 if (size > BITS_PER_UNIT)
6841 emit_library_call (chkr_check_addr_libfunc,
6842 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6843 Pmode, GEN_INT (size / BITS_PER_UNIT),
6844 TYPE_MODE (sizetype),
6845 GEN_INT (memory_usage),
6846 TYPE_MODE (integer_type_node));
6847 in_check_memory_usage = 0;
6851 /* In cases where an aligned union has an unaligned object
6852 as a field, we might be extracting a BLKmode value from
6853 an integer-mode (e.g., SImode) object. Handle this case
6854 by doing the extract into an object as wide as the field
6855 (which we know to be the width of a basic mode), then
6856 storing into memory, and changing the mode to BLKmode.
6857 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6858 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6859 if (mode1 == VOIDmode
6860 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6861 || (modifier != EXPAND_CONST_ADDRESS
6862 && modifier != EXPAND_INITIALIZER
6863 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6864 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6865 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6866 /* If the field isn't aligned enough to fetch as a memref,
6867 fetch it as a bit field. */
6868 || (mode1 != BLKmode
6869 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
6870 && ((TYPE_ALIGN (TREE_TYPE (tem))
6871 < GET_MODE_ALIGNMENT (mode))
6872 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6873 /* If the type and the field are a constant size and the
6874 size of the type isn't the same size as the bitfield,
6875 we must use bitfield operations. */
6876 || ((bitsize >= 0
6877 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6878 == INTEGER_CST)
6879 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6880 bitsize)))))
6881 || (modifier != EXPAND_CONST_ADDRESS
6882 && modifier != EXPAND_INITIALIZER
6883 && mode == BLKmode
6884 && SLOW_UNALIGNED_ACCESS (mode, alignment)
6885 && (TYPE_ALIGN (type) > alignment
6886 || bitpos % TYPE_ALIGN (type) != 0)))
6888 enum machine_mode ext_mode = mode;
6890 if (ext_mode == BLKmode
6891 && ! (target != 0 && GET_CODE (op0) == MEM
6892 && GET_CODE (target) == MEM
6893 && bitpos % BITS_PER_UNIT == 0))
6894 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6896 if (ext_mode == BLKmode)
6898 /* In this case, BITPOS must start at a byte boundary and
6899 TARGET, if specified, must be a MEM. */
6900 if (GET_CODE (op0) != MEM
6901 || (target != 0 && GET_CODE (target) != MEM)
6902 || bitpos % BITS_PER_UNIT != 0)
6903 abort ();
6905 op0 = change_address (op0, VOIDmode,
6906 plus_constant (XEXP (op0, 0),
6907 bitpos / BITS_PER_UNIT));
6908 if (target == 0)
6909 target = assign_temp (type, 0, 1, 1);
6911 emit_block_move (target, op0,
6912 bitsize == -1 ? expr_size (exp)
6913 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6914 / BITS_PER_UNIT),
6915 BITS_PER_UNIT);
6917 return target;
6920 op0 = validize_mem (op0);
6922 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6923 mark_reg_pointer (XEXP (op0, 0), alignment);
6925 op0 = extract_bit_field (op0, bitsize, bitpos,
6926 unsignedp, target, ext_mode, ext_mode,
6927 alignment,
6928 int_size_in_bytes (TREE_TYPE (tem)));
6930 /* If the result is a record type and BITSIZE is narrower than
6931 the mode of OP0, an integral mode, and this is a big endian
6932 machine, we must put the field into the high-order bits. */
6933 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6934 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6935 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6936 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6937 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6938 - bitsize),
6939 op0, 1);
6941 if (mode == BLKmode)
6943 rtx new = assign_stack_temp (ext_mode,
6944 bitsize / BITS_PER_UNIT, 0);
6946 emit_move_insn (new, op0);
6947 op0 = copy_rtx (new);
6948 PUT_MODE (op0, BLKmode);
6949 MEM_SET_IN_STRUCT_P (op0, 1);
6952 return op0;
6955 /* If the result is BLKmode, use that to access the object
6956 now as well. */
6957 if (mode == BLKmode)
6958 mode1 = BLKmode;
6960 /* Get a reference to just this component. */
6961 if (modifier == EXPAND_CONST_ADDRESS
6962 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6964 rtx new = gen_rtx_MEM (mode1,
6965 plus_constant (XEXP (op0, 0),
6966 (bitpos / BITS_PER_UNIT)));
6968 MEM_COPY_ATTRIBUTES (new, op0);
6969 op0 = new;
6971 else
6972 op0 = change_address (op0, mode1,
6973 plus_constant (XEXP (op0, 0),
6974 (bitpos / BITS_PER_UNIT)));
6976 set_mem_attributes (op0, exp, 0);
6977 if (GET_CODE (XEXP (op0, 0)) == REG)
6978 mark_reg_pointer (XEXP (op0, 0), alignment);
6980 MEM_VOLATILE_P (op0) |= volatilep;
6981 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6982 || modifier == EXPAND_CONST_ADDRESS
6983 || modifier == EXPAND_INITIALIZER)
6984 return op0;
6985 else if (target == 0)
6986 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6988 convert_move (target, op0, unsignedp);
6989 return target;
6992 /* Intended for a reference to a buffer of a file-object in Pascal.
6993 But it's not certain that a special tree code will really be
6994 necessary for these. INDIRECT_REF might work for them. */
6995 case BUFFER_REF:
6996 abort ();
6998 case IN_EXPR:
7000 /* Pascal set IN expression.
7002 Algorithm:
7003 rlo = set_low - (set_low%bits_per_word);
7004 the_word = set [ (index - rlo)/bits_per_word ];
7005 bit_index = index % bits_per_word;
7006 bitmask = 1 << bit_index;
7007 return !!(the_word & bitmask); */
7009 tree set = TREE_OPERAND (exp, 0);
7010 tree index = TREE_OPERAND (exp, 1);
7011 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7012 tree set_type = TREE_TYPE (set);
7013 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7014 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7015 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7016 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7017 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7018 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7019 rtx setaddr = XEXP (setval, 0);
7020 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7021 rtx rlow;
7022 rtx diff, quo, rem, addr, bit, result;
7024 /* If domain is empty, answer is no. Likewise if index is constant
7025 and out of bounds. */
7026 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7027 && TREE_CODE (set_low_bound) == INTEGER_CST
7028 && tree_int_cst_lt (set_high_bound, set_low_bound))
7029 || (TREE_CODE (index) == INTEGER_CST
7030 && TREE_CODE (set_low_bound) == INTEGER_CST
7031 && tree_int_cst_lt (index, set_low_bound))
7032 || (TREE_CODE (set_high_bound) == INTEGER_CST
7033 && TREE_CODE (index) == INTEGER_CST
7034 && tree_int_cst_lt (set_high_bound, index))))
7035 return const0_rtx;
7037 if (target == 0)
7038 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7040 /* If we get here, we have to generate the code for both cases
7041 (in range and out of range). */
7043 op0 = gen_label_rtx ();
7044 op1 = gen_label_rtx ();
7046 if (! (GET_CODE (index_val) == CONST_INT
7047 && GET_CODE (lo_r) == CONST_INT))
7049 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7050 GET_MODE (index_val), iunsignedp, 0, op1);
7053 if (! (GET_CODE (index_val) == CONST_INT
7054 && GET_CODE (hi_r) == CONST_INT))
7056 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7057 GET_MODE (index_val), iunsignedp, 0, op1);
7060 /* Calculate the element number of bit zero in the first word
7061 of the set. */
7062 if (GET_CODE (lo_r) == CONST_INT)
7063 rlow = GEN_INT (INTVAL (lo_r)
7064 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7065 else
7066 rlow = expand_binop (index_mode, and_optab, lo_r,
7067 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7068 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7070 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7071 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7073 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7074 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7075 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7076 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7078 addr = memory_address (byte_mode,
7079 expand_binop (index_mode, add_optab, diff,
7080 setaddr, NULL_RTX, iunsignedp,
7081 OPTAB_LIB_WIDEN));
7083 /* Extract the bit we want to examine. */
7084 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7085 gen_rtx_MEM (byte_mode, addr),
7086 make_tree (TREE_TYPE (index), rem),
7087 NULL_RTX, 1);
7088 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7089 GET_MODE (target) == byte_mode ? target : 0,
7090 1, OPTAB_LIB_WIDEN);
7092 if (result != target)
7093 convert_move (target, result, 1);
7095 /* Output the code to handle the out-of-range case. */
7096 emit_jump (op0);
7097 emit_label (op1);
7098 emit_move_insn (target, const0_rtx);
7099 emit_label (op0);
7100 return target;
7103 case WITH_CLEANUP_EXPR:
7104 if (RTL_EXPR_RTL (exp) == 0)
7106 RTL_EXPR_RTL (exp)
7107 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7108 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7110 /* That's it for this cleanup. */
7111 TREE_OPERAND (exp, 2) = 0;
7113 return RTL_EXPR_RTL (exp);
7115 case CLEANUP_POINT_EXPR:
7117 /* Start a new binding layer that will keep track of all cleanup
7118 actions to be performed. */
7119 expand_start_bindings (2);
7121 target_temp_slot_level = temp_slot_level;
7123 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7124 /* If we're going to use this value, load it up now. */
7125 if (! ignore)
7126 op0 = force_not_mem (op0);
7127 preserve_temp_slots (op0);
7128 expand_end_bindings (NULL_TREE, 0, 0);
7130 return op0;
7132 case CALL_EXPR:
7133 /* Check for a built-in function. */
7134 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7135 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7136 == FUNCTION_DECL)
7137 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7139 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7140 == BUILT_IN_FRONTEND)
7141 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7142 else
7143 return expand_builtin (exp, target, subtarget, tmode, ignore);
7146 return expand_call (exp, target, ignore);
7148 case NON_LVALUE_EXPR:
7149 case NOP_EXPR:
7150 case CONVERT_EXPR:
7151 case REFERENCE_EXPR:
7152 if (TREE_OPERAND (exp, 0) == error_mark_node)
7153 return const0_rtx;
7155 if (TREE_CODE (type) == UNION_TYPE)
7157 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7159 /* If both input and output are BLKmode, this conversion
7160 isn't actually doing anything unless we need to make the
7161 alignment stricter. */
7162 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7163 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7164 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7165 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7166 modifier);
7168 if (target == 0)
7170 if (mode != BLKmode)
7171 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7172 else
7173 target = assign_temp (type, 0, 1, 1);
7176 if (GET_CODE (target) == MEM)
7177 /* Store data into beginning of memory target. */
7178 store_expr (TREE_OPERAND (exp, 0),
7179 change_address (target, TYPE_MODE (valtype), 0), 0);
7181 else if (GET_CODE (target) == REG)
7182 /* Store this field into a union of the proper type. */
7183 store_field (target,
7184 MIN ((int_size_in_bytes (TREE_TYPE
7185 (TREE_OPERAND (exp, 0)))
7186 * BITS_PER_UNIT),
7187 GET_MODE_BITSIZE (mode)),
7188 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7189 VOIDmode, 0, BITS_PER_UNIT,
7190 int_size_in_bytes (type), 0);
7191 else
7192 abort ();
7194 /* Return the entire union. */
7195 return target;
7198 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7200 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7201 ro_modifier);
7203 /* If the signedness of the conversion differs and OP0 is
7204 a promoted SUBREG, clear that indication since we now
7205 have to do the proper extension. */
7206 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7207 && GET_CODE (op0) == SUBREG)
7208 SUBREG_PROMOTED_VAR_P (op0) = 0;
7210 return op0;
7213 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7214 if (GET_MODE (op0) == mode)
7215 return op0;
7217 /* If OP0 is a constant, just convert it into the proper mode. */
7218 if (CONSTANT_P (op0))
7219 return
7220 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7221 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7223 if (modifier == EXPAND_INITIALIZER)
7224 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7226 if (target == 0)
7227 return
7228 convert_to_mode (mode, op0,
7229 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7230 else
7231 convert_move (target, op0,
7232 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7233 return target;
7235 case PLUS_EXPR:
7236 /* We come here from MINUS_EXPR when the second operand is a
7237 constant. */
7238 plus_expr:
7239 this_optab = ! unsignedp && flag_trapv
7240 && (GET_MODE_CLASS(mode) == MODE_INT)
7241 ? addv_optab : add_optab;
7243 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7244 something else, make sure we add the register to the constant and
7245 then to the other thing. This case can occur during strength
7246 reduction and doing it this way will produce better code if the
7247 frame pointer or argument pointer is eliminated.
7249 fold-const.c will ensure that the constant is always in the inner
7250 PLUS_EXPR, so the only case we need to do anything about is if
7251 sp, ap, or fp is our second argument, in which case we must swap
7252 the innermost first argument and our second argument. */
7254 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7255 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7256 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7257 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7258 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7259 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7261 tree t = TREE_OPERAND (exp, 1);
7263 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7264 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7267 /* If the result is to be ptr_mode and we are adding an integer to
7268 something, we might be forming a constant. So try to use
7269 plus_constant. If it produces a sum and we can't accept it,
7270 use force_operand. This allows P = &ARR[const] to generate
7271 efficient code on machines where a SYMBOL_REF is not a valid
7272 address.
7274 If this is an EXPAND_SUM call, always return the sum. */
7275 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7276 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7278 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7279 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7280 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7282 rtx constant_part;
7284 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7285 EXPAND_SUM);
7286 /* Use immed_double_const to ensure that the constant is
7287 truncated according to the mode of OP1, then sign extended
7288 to a HOST_WIDE_INT. Using the constant directly can result
7289 in non-canonical RTL in a 64x32 cross compile. */
7290 constant_part
7291 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7292 (HOST_WIDE_INT) 0,
7293 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7294 op1 = plus_constant (op1, INTVAL (constant_part));
7295 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7296 op1 = force_operand (op1, target);
7297 return op1;
7300 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7301 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7302 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7304 rtx constant_part;
7306 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7307 EXPAND_SUM);
7308 if (! CONSTANT_P (op0))
7310 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7311 VOIDmode, modifier);
7312 /* Don't go to both_summands if modifier
7313 says it's not right to return a PLUS. */
7314 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7315 goto binop2;
7316 goto both_summands;
7318 /* Use immed_double_const to ensure that the constant is
7319 truncated according to the mode of OP1, then sign extended
7320 to a HOST_WIDE_INT. Using the constant directly can result
7321 in non-canonical RTL in a 64x32 cross compile. */
7322 constant_part
7323 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7324 (HOST_WIDE_INT) 0,
7325 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7326 op0 = plus_constant (op0, INTVAL (constant_part));
7327 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7328 op0 = force_operand (op0, target);
7329 return op0;
7333 /* No sense saving up arithmetic to be done
7334 if it's all in the wrong mode to form part of an address.
7335 And force_operand won't know whether to sign-extend or
7336 zero-extend. */
7337 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7338 || mode != ptr_mode)
7339 goto binop;
7341 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7342 subtarget = 0;
7344 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7345 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7347 both_summands:
7348 /* Make sure any term that's a sum with a constant comes last. */
7349 if (GET_CODE (op0) == PLUS
7350 && CONSTANT_P (XEXP (op0, 1)))
7352 temp = op0;
7353 op0 = op1;
7354 op1 = temp;
7356 /* If adding to a sum including a constant,
7357 associate it to put the constant outside. */
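/* Roughly, adding OP0 to (plus X 12) is reassociated here as
   (plus (plus X OP0) 12), with any constant part of OP0 also folded
   into the outer constant term.  */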
7358 if (GET_CODE (op1) == PLUS
7359 && CONSTANT_P (XEXP (op1, 1)))
7361 rtx constant_term = const0_rtx;
7363 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7364 if (temp != 0)
7365 op0 = temp;
7366 /* Ensure that MULT comes first if there is one. */
7367 else if (GET_CODE (op0) == MULT)
7368 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7369 else
7370 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7372 /* Let's also eliminate constants from op0 if possible. */
7373 op0 = eliminate_constant_term (op0, &constant_term);
7375 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7376 their sum should be a constant. Form it into OP1, since the
7377 result we want will then be OP0 + OP1. */
7379 temp = simplify_binary_operation (PLUS, mode, constant_term,
7380 XEXP (op1, 1));
7381 if (temp != 0)
7382 op1 = temp;
7383 else
7384 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7387 /* Put a constant term last and put a multiplication first. */
7388 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7389 temp = op1, op1 = op0, op0 = temp;
7391 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7392 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7394 case MINUS_EXPR:
7395 /* For initializers, we are allowed to return a MINUS of two
7396 symbolic constants. Here we handle all cases when both operands
7397 are constant. */
7398 /* Handle difference of two symbolic constants,
7399 for the sake of an initializer. */
7400 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7401 && really_constant_p (TREE_OPERAND (exp, 0))
7402 && really_constant_p (TREE_OPERAND (exp, 1)))
7404 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7405 VOIDmode, ro_modifier);
7406 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7407 VOIDmode, ro_modifier);
7409 /* If the last operand is a CONST_INT, use plus_constant of
7410 the negated constant. Else make the MINUS. */
7411 if (GET_CODE (op1) == CONST_INT)
7412 return plus_constant (op0, - INTVAL (op1));
7413 else
7414 return gen_rtx_MINUS (mode, op0, op1);
7416 /* Convert A - const to A + (-const). */
7417 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7419 tree negated = fold (build1 (NEGATE_EXPR, type,
7420 TREE_OPERAND (exp, 1)));
7422 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7423 /* If we can't negate the constant in TYPE, leave it alone and
7424 expand_binop will negate it for us. We used to try to do it
7425 here in the signed version of TYPE, but that doesn't work
7426 on POINTER_TYPEs. */;
7427 else
7429 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7430 goto plus_expr;
7433 this_optab = ! unsignedp && flag_trapv
7434 && (GET_MODE_CLASS(mode) == MODE_INT)
7435 ? subv_optab : sub_optab;
7436 goto binop;
7438 case MULT_EXPR:
7439 /* If first operand is constant, swap them.
7440 Thus the following special case checks need only
7441 check the second operand. */
7442 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7444 register tree t1 = TREE_OPERAND (exp, 0);
7445 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7446 TREE_OPERAND (exp, 1) = t1;
7449 /* Attempt to return something suitable for generating an
7450 indexed address, for machines that support that. */
7452 if (modifier == EXPAND_SUM && mode == ptr_mode
7453 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7454 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7456 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7457 EXPAND_SUM);
7459 /* Apply distributive law if OP0 is x+c. */
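/* For example, (plus X 4) multiplied by 3 is returned as
   (plus (mult X 3) 12), a form suitable for indexed addressing.  */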
7460 if (GET_CODE (op0) == PLUS
7461 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7462 return
7463 gen_rtx_PLUS
7464 (mode,
7465 gen_rtx_MULT
7466 (mode, XEXP (op0, 0),
7467 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7468 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7469 * INTVAL (XEXP (op0, 1))));
7471 if (GET_CODE (op0) != REG)
7472 op0 = force_operand (op0, NULL_RTX);
7473 if (GET_CODE (op0) != REG)
7474 op0 = copy_to_mode_reg (mode, op0);
7476 return
7477 gen_rtx_MULT (mode, op0,
7478 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7481 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7482 subtarget = 0;
7484 /* Check for multiplying things that have been extended
7485 from a narrower type. If this machine supports multiplying
7486 in that narrower type with a result in the desired type,
7487 do it that way, and avoid the explicit type-conversion. */
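/* For example, (int) A * (int) B, where A and B are HImode values,
   can often be done with a single widening multiply (a mulhisi3
   pattern, say) instead of extending both operands to SImode first.  */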
7488 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7489 && TREE_CODE (type) == INTEGER_TYPE
7490 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7491 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7492 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7493 && int_fits_type_p (TREE_OPERAND (exp, 1),
7494 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7495 /* Don't use a widening multiply if a shift will do. */
7496 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7497 > HOST_BITS_PER_WIDE_INT)
7498 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7499 ||
7500 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7501 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7502 ==
7503 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7504 /* If both operands are extended, they must either both
7505 be zero-extended or both be sign-extended. */
7506 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7507 ==
7508 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7510 enum machine_mode innermode
7511 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7512 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7513 ? smul_widen_optab : umul_widen_optab);
7514 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7515 ? umul_widen_optab : smul_widen_optab);
7516 if (mode == GET_MODE_WIDER_MODE (innermode))
7518 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7520 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7521 NULL_RTX, VOIDmode, 0);
7522 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7523 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7524 VOIDmode, 0);
7525 else
7526 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7527 NULL_RTX, VOIDmode, 0);
7528 goto binop2;
7530 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7531 && innermode == word_mode)
7533 rtx htem;
7534 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7535 NULL_RTX, VOIDmode, 0);
7536 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7537 op1 = convert_modes (innermode, mode,
7538 expand_expr (TREE_OPERAND (exp, 1),
7539 NULL_RTX, VOIDmode, 0),
7540 unsignedp);
7541 else
7542 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7543 NULL_RTX, VOIDmode, 0);
7544 temp = expand_binop (mode, other_optab, op0, op1, target,
7545 unsignedp, OPTAB_LIB_WIDEN);
7546 htem = expand_mult_highpart_adjust (innermode,
7547 gen_highpart (innermode, temp),
7548 op0, op1,
7549 gen_highpart (innermode, temp),
7550 unsignedp);
7551 emit_move_insn (gen_highpart (innermode, temp), htem);
7552 return temp;
7556 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7557 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7558 return expand_mult (mode, op0, op1, target, unsignedp);
7560 case TRUNC_DIV_EXPR:
7561 case FLOOR_DIV_EXPR:
7562 case CEIL_DIV_EXPR:
7563 case ROUND_DIV_EXPR:
7564 case EXACT_DIV_EXPR:
7565 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7566 subtarget = 0;
7567 /* Possible optimization: compute the dividend with EXPAND_SUM;
7568 then, if the divisor is constant, we can optimize the case
7569 where some terms of the dividend have coefficients divisible by it. */
7570 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7571 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7572 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7574 case RDIV_EXPR:
7575 this_optab = flodiv_optab;
7576 goto binop;
7578 case TRUNC_MOD_EXPR:
7579 case FLOOR_MOD_EXPR:
7580 case CEIL_MOD_EXPR:
7581 case ROUND_MOD_EXPR:
7582 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7583 subtarget = 0;
7584 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7585 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7586 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7588 case FIX_ROUND_EXPR:
7589 case FIX_FLOOR_EXPR:
7590 case FIX_CEIL_EXPR:
7591 abort (); /* Not used for C. */
7593 case FIX_TRUNC_EXPR:
7594 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7595 if (target == 0)
7596 target = gen_reg_rtx (mode);
7597 expand_fix (target, op0, unsignedp);
7598 return target;
7600 case FLOAT_EXPR:
7601 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7602 if (target == 0)
7603 target = gen_reg_rtx (mode);
7604 /* expand_float can't figure out what to do if FROM has VOIDmode.
7605 So give it the correct mode. With -O, cse will optimize this. */
7606 if (GET_MODE (op0) == VOIDmode)
7607 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7608 op0);
7609 expand_float (target, op0,
7610 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7611 return target;
7613 case NEGATE_EXPR:
7614 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7615 temp = expand_unop (mode,
7616 ! unsignedp && flag_trapv
7617 && (GET_MODE_CLASS(mode) == MODE_INT)
7618 ? negv_optab : neg_optab, op0, target, 0);
7619 if (temp == 0)
7620 abort ();
7621 return temp;
7623 case ABS_EXPR:
7624 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7626 /* Handle complex values specially. */
7627 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7628 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7629 return expand_complex_abs (mode, op0, target, unsignedp);
7631 /* Unsigned abs is simply the operand. Testing here means we don't
7632 risk generating incorrect code below. */
7633 if (TREE_UNSIGNED (type))
7634 return op0;
7636 return expand_abs (mode, op0, target, unsignedp,
7637 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7639 case MAX_EXPR:
7640 case MIN_EXPR:
7641 target = original_target;
7642 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7643 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7644 || GET_MODE (target) != mode
7645 || (GET_CODE (target) == REG
7646 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7647 target = gen_reg_rtx (mode);
7648 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7649 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7651 /* First try to do it with a special MIN or MAX instruction.
7652 If that does not win, use a conditional jump to select the proper
7653 value. */
7654 this_optab = (TREE_UNSIGNED (type)
7655 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7656 : (code == MIN_EXPR ? smin_optab : smax_optab));
7658 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7659 OPTAB_WIDEN);
7660 if (temp != 0)
7661 return temp;
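/* The fallback jump sequence emitted below for MAX_EXPR is essentially
        TARGET = OP0;  if (TARGET >= OP1) goto done;  TARGET = OP1;  done:
   and correspondingly with <= for MIN_EXPR.  */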
7663 /* At this point, a MEM target is no longer useful; we will get better
7664 code without it. */
7666 if (GET_CODE (target) == MEM)
7667 target = gen_reg_rtx (mode);
7669 if (target != op0)
7670 emit_move_insn (target, op0);
7672 op0 = gen_label_rtx ();
7674 /* If this mode is an integer too wide to compare properly,
7675 compare word by word. Rely on cse to optimize constant cases. */
7676 if (GET_MODE_CLASS (mode) == MODE_INT
7677 && ! can_compare_p (GE, mode, ccp_jump))
7679 if (code == MAX_EXPR)
7680 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7681 target, op1, NULL_RTX, op0);
7682 else
7683 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7684 op1, target, NULL_RTX, op0);
7686 else
7688 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7689 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7690 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7691 op0);
7693 emit_move_insn (target, op1);
7694 emit_label (op0);
7695 return target;
7697 case BIT_NOT_EXPR:
7698 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7699 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7700 if (temp == 0)
7701 abort ();
7702 return temp;
7704 case FFS_EXPR:
7705 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7706 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7707 if (temp == 0)
7708 abort ();
7709 return temp;
7711 /* ??? Can optimize bitwise operations with one arg constant.
7712 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7713 and (a bitwise1 b) bitwise2 b (etc)
7714 but that is probably not worthwhile. */
7716 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7717 boolean values when we want in all cases to compute both of them. In
7718 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7719 as actual zero-or-1 values and then bitwise anding. In cases where
7720 there cannot be any side effects, better code would be made by
7721 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7722 how to recognize those cases. */
7724 case TRUTH_AND_EXPR:
7725 case BIT_AND_EXPR:
7726 this_optab = and_optab;
7727 goto binop;
7729 case TRUTH_OR_EXPR:
7730 case BIT_IOR_EXPR:
7731 this_optab = ior_optab;
7732 goto binop;
7734 case TRUTH_XOR_EXPR:
7735 case BIT_XOR_EXPR:
7736 this_optab = xor_optab;
7737 goto binop;
7739 case LSHIFT_EXPR:
7740 case RSHIFT_EXPR:
7741 case LROTATE_EXPR:
7742 case RROTATE_EXPR:
7743 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7744 subtarget = 0;
7745 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7746 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7747 unsignedp);
7749 /* Could determine the answer when only additive constants differ. Also,
7750 the addition of one can be handled by changing the condition. */
7751 case LT_EXPR:
7752 case LE_EXPR:
7753 case GT_EXPR:
7754 case GE_EXPR:
7755 case EQ_EXPR:
7756 case NE_EXPR:
7757 case UNORDERED_EXPR:
7758 case ORDERED_EXPR:
7759 case UNLT_EXPR:
7760 case UNLE_EXPR:
7761 case UNGT_EXPR:
7762 case UNGE_EXPR:
7763 case UNEQ_EXPR:
7764 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7765 if (temp != 0)
7766 return temp;
7768 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
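/* That is, emit
        temp = foo;  if (temp == 0) goto L;  temp = 1;  L:
   rather than a general store-flag sequence.  */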
7769 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7770 && original_target
7771 && GET_CODE (original_target) == REG
7772 && (GET_MODE (original_target)
7773 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7775 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7776 VOIDmode, 0);
7778 if (temp != original_target)
7779 temp = copy_to_reg (temp);
7781 op1 = gen_label_rtx ();
7782 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7783 GET_MODE (temp), unsignedp, 0, op1);
7784 emit_move_insn (temp, const1_rtx);
7785 emit_label (op1);
7786 return temp;
7789 /* If no set-flag instruction, must generate a conditional
7790 store into a temporary variable. Drop through
7791 and handle this like && and ||. */
7793 case TRUTH_ANDIF_EXPR:
7794 case TRUTH_ORIF_EXPR:
7795 if (! ignore
7796 && (target == 0 || ! safe_from_p (target, exp, 1)
7797 /* Make sure we don't have a hard reg (such as function's return
7798 value) live across basic blocks, if not optimizing. */
7799 || (!optimize && GET_CODE (target) == REG
7800 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7801 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7803 if (target)
7804 emit_clr_insn (target);
7806 op1 = gen_label_rtx ();
7807 jumpifnot (exp, op1);
7809 if (target)
7810 emit_0_to_1_insn (target);
7812 emit_label (op1);
7813 return ignore ? const0_rtx : target;
7815 case TRUTH_NOT_EXPR:
7816 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7817 /* The parser is careful to generate TRUTH_NOT_EXPR
7818 only with operands that are always zero or one. */
7819 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7820 target, 1, OPTAB_LIB_WIDEN);
7821 if (temp == 0)
7822 abort ();
7823 return temp;
7825 case COMPOUND_EXPR:
7826 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7827 emit_queue ();
7828 return expand_expr (TREE_OPERAND (exp, 1),
7829 (ignore ? const0_rtx : target),
7830 VOIDmode, 0);
7832 case COND_EXPR:
7833 /* If we would have a "singleton" (see below) were it not for a
7834 conversion in each arm, bring that conversion back out. */
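/* For instance, (X ? (int) (A + B) : (int) A) is rewritten as
   (int) (X ? A + B : A), so the "singleton" handling below still
   applies to the inner COND_EXPR.  */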
7835 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7836 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7837 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7838 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7840 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7841 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7843 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7844 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7845 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7846 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7847 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7848 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7849 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7850 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7851 return expand_expr (build1 (NOP_EXPR, type,
7852 build (COND_EXPR, TREE_TYPE (true),
7853 TREE_OPERAND (exp, 0),
7854 true, false)),
7855 target, tmode, modifier);
7859 /* Note that COND_EXPRs whose type is a structure or union
7860 are required to be constructed to contain assignments of
7861 a temporary variable, so that we can evaluate them here
7862 for side effect only. If type is void, we must do likewise. */
7864 /* If an arm of the branch requires a cleanup,
7865 only that cleanup is performed. */
7867 tree singleton = 0;
7868 tree binary_op = 0, unary_op = 0;
7870 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7871 convert it to our mode, if necessary. */
7872 if (integer_onep (TREE_OPERAND (exp, 1))
7873 && integer_zerop (TREE_OPERAND (exp, 2))
7874 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7876 if (ignore)
7878 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7879 ro_modifier);
7880 return const0_rtx;
7883 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7884 if (GET_MODE (op0) == mode)
7885 return op0;
7887 if (target == 0)
7888 target = gen_reg_rtx (mode);
7889 convert_move (target, op0, unsignedp);
7890 return target;
7893 /* Check for X ? A + B : A. If we have this, we can copy A to the
7894 output and conditionally add B. Similarly for unary operations.
7895 Don't do this if X has side-effects because those side effects
7896 might affect A or B and the "?" operation is a sequence point in
7897 ANSI. (operand_equal_p tests for side effects.) */
7899 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7900 && operand_equal_p (TREE_OPERAND (exp, 2),
7901 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7902 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7903 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7904 && operand_equal_p (TREE_OPERAND (exp, 1),
7905 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7906 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7907 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7908 && operand_equal_p (TREE_OPERAND (exp, 2),
7909 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7910 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7911 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7912 && operand_equal_p (TREE_OPERAND (exp, 1),
7913 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7914 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7916 /* If we are not to produce a result, we have no target. Otherwise,
7917 if a target was specified use it; it will not be used as an
7918 intermediate target unless it is safe. If no target, use a
7919 temporary. */
7921 if (ignore)
7922 temp = 0;
7923 else if (original_target
7924 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7925 || (singleton && GET_CODE (original_target) == REG
7926 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7927 && original_target == var_rtx (singleton)))
7928 && GET_MODE (original_target) == mode
7929 #ifdef HAVE_conditional_move
7930 && (! can_conditionally_move_p (mode)
7931 || GET_CODE (original_target) == REG
7932 || TREE_ADDRESSABLE (type))
7933 #endif
7934 && ! (GET_CODE (original_target) == MEM
7935 && MEM_VOLATILE_P (original_target)))
7936 temp = original_target;
7937 else if (TREE_ADDRESSABLE (type))
7938 abort ();
7939 else
7940 temp = assign_temp (type, 0, 0, 1);
7942 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7943 do the test of X as a store-flag operation, do this as
7944 A + ((X != 0) << log C). Similarly for other simple binary
7945 operators. Only do for C == 1 if BRANCH_COST is low. */
7946 if (temp && singleton && binary_op
7947 && (TREE_CODE (binary_op) == PLUS_EXPR
7948 || TREE_CODE (binary_op) == MINUS_EXPR
7949 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7950 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7951 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7952 : integer_onep (TREE_OPERAND (binary_op, 1)))
7953 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7955 rtx result;
7956 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
7957 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
7958 ? addv_optab : add_optab)
7959 : TREE_CODE (binary_op) == MINUS_EXPR
7960 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
7961 ? subv_optab : sub_optab)
7962 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7963 : xor_optab);
7965 /* If we had X ? A : A + 1, do this as A + (X == 0).
7967 We have to invert the truth value here and then put it
7968 back later if do_store_flag fails. We cannot simply copy
7969 TREE_OPERAND (exp, 0) to another variable and modify that
7970 because invert_truthvalue can modify the tree pointed to
7971 by its argument. */
7972 if (singleton == TREE_OPERAND (exp, 1))
7973 TREE_OPERAND (exp, 0)
7974 = invert_truthvalue (TREE_OPERAND (exp, 0));
7976 result = do_store_flag (TREE_OPERAND (exp, 0),
7977 (safe_from_p (temp, singleton, 1)
7978 ? temp : NULL_RTX),
7979 mode, BRANCH_COST <= 1);
7981 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7982 result = expand_shift (LSHIFT_EXPR, mode, result,
7983 build_int_2 (tree_log2
7984 (TREE_OPERAND
7985 (binary_op, 1)),
7987 (safe_from_p (temp, singleton, 1)
7988 ? temp : NULL_RTX), 0);
7990 if (result)
7992 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7993 return expand_binop (mode, boptab, op1, result, temp,
7994 unsignedp, OPTAB_LIB_WIDEN);
7996 else if (singleton == TREE_OPERAND (exp, 1))
7997 TREE_OPERAND (exp, 0)
7998 = invert_truthvalue (TREE_OPERAND (exp, 0));
8001 do_pending_stack_adjust ();
8002 NO_DEFER_POP;
8003 op0 = gen_label_rtx ();
8005 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8007 if (temp != 0)
8009 /* If the target conflicts with the other operand of the
8010 binary op, we can't use it. Also, we can't use the target
8011 if it is a hard register, because evaluating the condition
8012 might clobber it. */
8013 if ((binary_op
8014 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8015 || (GET_CODE (temp) == REG
8016 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8017 temp = gen_reg_rtx (mode);
8018 store_expr (singleton, temp, 0);
8020 else
8021 expand_expr (singleton,
8022 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8023 if (singleton == TREE_OPERAND (exp, 1))
8024 jumpif (TREE_OPERAND (exp, 0), op0);
8025 else
8026 jumpifnot (TREE_OPERAND (exp, 0), op0);
8028 start_cleanup_deferral ();
8029 if (binary_op && temp == 0)
8030 /* Just touch the other operand. */
8031 expand_expr (TREE_OPERAND (binary_op, 1),
8032 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8033 else if (binary_op)
8034 store_expr (build (TREE_CODE (binary_op), type,
8035 make_tree (type, temp),
8036 TREE_OPERAND (binary_op, 1)),
8037 temp, 0);
8038 else
8039 store_expr (build1 (TREE_CODE (unary_op), type,
8040 make_tree (type, temp)),
8041 temp, 0);
8042 op1 = op0;
8044 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8045 comparison operator. If we have one of these cases, set the
8046 output to A, branch on A (cse will merge these two references),
8047 then set the output to FOO. */
8048 else if (temp
8049 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8050 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8051 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8052 TREE_OPERAND (exp, 1), 0)
8053 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8054 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8055 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8057 if (GET_CODE (temp) == REG
8058 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8059 temp = gen_reg_rtx (mode);
8060 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8061 jumpif (TREE_OPERAND (exp, 0), op0);
8063 start_cleanup_deferral ();
8064 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8065 op1 = op0;
8067 else if (temp
8068 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8069 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8070 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8071 TREE_OPERAND (exp, 2), 0)
8072 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8073 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8074 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8076 if (GET_CODE (temp) == REG
8077 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8078 temp = gen_reg_rtx (mode);
8079 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8080 jumpifnot (TREE_OPERAND (exp, 0), op0);
8082 start_cleanup_deferral ();
8083 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8084 op1 = op0;
8086 else
8088 op1 = gen_label_rtx ();
8089 jumpifnot (TREE_OPERAND (exp, 0), op0);
8091 start_cleanup_deferral ();
8093 /* One branch of the cond can be void, if it never returns. For
8094 example A ? throw : E */
8095 if (temp != 0
8096 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8097 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8098 else
8099 expand_expr (TREE_OPERAND (exp, 1),
8100 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8101 end_cleanup_deferral ();
8102 emit_queue ();
8103 emit_jump_insn (gen_jump (op1));
8104 emit_barrier ();
8105 emit_label (op0);
8106 start_cleanup_deferral ();
8107 if (temp != 0
8108 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8109 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8110 else
8111 expand_expr (TREE_OPERAND (exp, 2),
8112 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8115 end_cleanup_deferral ();
8117 emit_queue ();
8118 emit_label (op1);
8119 OK_DEFER_POP;
8121 return temp;
8124 case TARGET_EXPR:
8126 /* Something needs to be initialized, but we didn't know
8127 where that thing was when building the tree. For example,
8128 it could be the return value of a function, or a parameter
8129 to a function which is laid out in the stack, or a temporary
8130 variable which must be passed by reference.
8132 We guarantee that the expression will either be constructed
8133 or copied into our original target. */
8135 tree slot = TREE_OPERAND (exp, 0);
8136 tree cleanups = NULL_TREE;
8137 tree exp1;
8139 if (TREE_CODE (slot) != VAR_DECL)
8140 abort ();
8142 if (! ignore)
8143 target = original_target;
8145 /* Set this here so that if we get a target that refers to a
8146 register variable that's already been used, put_reg_into_stack
8147 knows that it should fix up those uses. */
8148 TREE_USED (slot) = 1;
8150 if (target == 0)
8152 if (DECL_RTL (slot) != 0)
8154 target = DECL_RTL (slot);
8155 /* If we have already expanded the slot, don't do
8156 it again. (mrs) */
8157 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8158 return target;
8160 else
8162 target = assign_temp (type, 2, 0, 1);
8163 /* All temp slots at this level must not conflict. */
8164 preserve_temp_slots (target);
8165 DECL_RTL (slot) = target;
8166 if (TREE_ADDRESSABLE (slot))
8167 put_var_into_stack (slot);
8169 /* Since SLOT is not known to the called function
8170 to belong to its stack frame, we must build an explicit
8171 cleanup. This case occurs when we must build up a reference
8172 to pass the reference as an argument. In this case,
8173 it is very likely that such a reference need not be
8174 built here. */
8176 if (TREE_OPERAND (exp, 2) == 0)
8177 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8178 cleanups = TREE_OPERAND (exp, 2);
8181 else
8183 /* This case does occur when expanding a parameter which
8184 needs to be constructed on the stack. The target
8185 is the actual stack address that we want to initialize.
8186 The function we call will perform the cleanup in this case. */
8188 /* If we have already assigned it space, use that space,
8189 not the target that we were passed in, as our target
8190 parameter is only a hint. */
8191 if (DECL_RTL (slot) != 0)
8193 target = DECL_RTL (slot);
8194 /* If we have already expanded the slot, don't do
8195 it again. (mrs) */
8196 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8197 return target;
8199 else
8201 DECL_RTL (slot) = target;
8202 /* If we must have an addressable slot, then make sure that
8203 the RTL that we just stored in slot is OK. */
8204 if (TREE_ADDRESSABLE (slot))
8205 put_var_into_stack (slot);
8209 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8210 /* Mark it as expanded. */
8211 TREE_OPERAND (exp, 1) = NULL_TREE;
8213 store_expr (exp1, target, 0);
8215 expand_decl_cleanup (NULL_TREE, cleanups);
8217 return target;
8220 case INIT_EXPR:
8222 tree lhs = TREE_OPERAND (exp, 0);
8223 tree rhs = TREE_OPERAND (exp, 1);
8224 tree noncopied_parts = 0;
8225 tree lhs_type = TREE_TYPE (lhs);
8227 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8228 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8229 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8230 TYPE_NONCOPIED_PARTS (lhs_type));
8231 while (noncopied_parts != 0)
8233 expand_assignment (TREE_VALUE (noncopied_parts),
8234 TREE_PURPOSE (noncopied_parts), 0, 0);
8235 noncopied_parts = TREE_CHAIN (noncopied_parts);
8237 return temp;
8240 case MODIFY_EXPR:
8242 /* If lhs is complex, expand calls in rhs before computing it.
8243 That's so we don't compute a pointer and save it over a call.
8244 If lhs is simple, compute it first so we can give it as a
8245 target if the rhs is just a call. This avoids an extra temp and copy
8246 and so prevents a partial subsumption that would make bad code.
8247 Actually we could treat component_ref's of vars like vars. */
8249 tree lhs = TREE_OPERAND (exp, 0);
8250 tree rhs = TREE_OPERAND (exp, 1);
8251 tree noncopied_parts = 0;
8252 tree lhs_type = TREE_TYPE (lhs);
8254 temp = 0;
8256 if (TREE_CODE (lhs) != VAR_DECL
8257 && TREE_CODE (lhs) != RESULT_DECL
8258 && TREE_CODE (lhs) != PARM_DECL
8259 && ! (TREE_CODE (lhs) == INDIRECT_REF
8260 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8262 /* Check for |= or &= of a bitfield of size one into another bitfield
8263 of size 1. In this case, (unless we need the result of the
8264 assignment) we can do this more efficiently with a
8265 test followed by an assignment, if necessary.
8267 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8268 things change so we do, this code should be enhanced to
8269 support it. */
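/* Under those conditions, something like S.A |= S.B with one-bit
   fields A and B is emitted roughly as
        if (S.B) S.A = 1;
   and S.A &= S.B roughly as
        if (! S.B) S.A = 0;
   so the destination bitfield is never read.  */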
8270 if (ignore
8271 && TREE_CODE (lhs) == COMPONENT_REF
8272 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8273 || TREE_CODE (rhs) == BIT_AND_EXPR)
8274 && TREE_OPERAND (rhs, 0) == lhs
8275 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8276 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8277 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8279 rtx label = gen_label_rtx ();
8281 do_jump (TREE_OPERAND (rhs, 1),
8282 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8283 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8284 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8285 (TREE_CODE (rhs) == BIT_IOR_EXPR
8286 ? integer_one_node
8287 : integer_zero_node)),
8288 0, 0);
8289 do_pending_stack_adjust ();
8290 emit_label (label);
8291 return const0_rtx;
8294 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8295 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8296 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8297 TYPE_NONCOPIED_PARTS (lhs_type));
8299 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8300 while (noncopied_parts != 0)
8302 expand_assignment (TREE_PURPOSE (noncopied_parts),
8303 TREE_VALUE (noncopied_parts), 0, 0);
8304 noncopied_parts = TREE_CHAIN (noncopied_parts);
8306 return temp;
8309 case RETURN_EXPR:
8310 if (!TREE_OPERAND (exp, 0))
8311 expand_null_return ();
8312 else
8313 expand_return (TREE_OPERAND (exp, 0));
8314 return const0_rtx;
8316 case PREINCREMENT_EXPR:
8317 case PREDECREMENT_EXPR:
8318 return expand_increment (exp, 0, ignore);
8320 case POSTINCREMENT_EXPR:
8321 case POSTDECREMENT_EXPR:
8322 /* Faster to treat as pre-increment if result is not used. */
8323 return expand_increment (exp, ! ignore, ignore);
8325 case ADDR_EXPR:
8326 /* If nonzero, TEMP will be set to the address of something that might
8327 be a MEM corresponding to a stack slot. */
8328 temp = 0;
8330 /* Are we taking the address of a nested function? */
8331 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8332 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8333 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8334 && ! TREE_STATIC (exp))
8336 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8337 op0 = force_operand (op0, target);
8339 /* If we are taking the address of something erroneous, just
8340 return a zero. */
8341 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8342 return const0_rtx;
8343 else
8345 /* We make sure to pass const0_rtx down if we came in with
8346 ignore set, to avoid doing the cleanups twice for something. */
8347 op0 = expand_expr (TREE_OPERAND (exp, 0),
8348 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8349 (modifier == EXPAND_INITIALIZER
8350 ? modifier : EXPAND_CONST_ADDRESS));
8352 /* If we are going to ignore the result, OP0 will have been set
8353 to const0_rtx, so just return it. Don't get confused and
8354 think we are taking the address of the constant. */
8355 if (ignore)
8356 return op0;
8358 op0 = protect_from_queue (op0, 0);
8360 /* We would like the object in memory. If it is a constant, we can
8361 have it be statically allocated into memory. For a non-constant,
8362 we need to allocate some memory and store the value into it. */
8364 if (CONSTANT_P (op0))
8365 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8366 op0);
8367 else if (GET_CODE (op0) == MEM)
8369 mark_temp_addr_taken (op0);
8370 temp = XEXP (op0, 0);
8373 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8374 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8375 || GET_CODE (op0) == PARALLEL)
8377 /* If this object is in a register, it must not
8378 be BLKmode. */
8379 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8380 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8382 mark_temp_addr_taken (memloc);
8383 if (GET_CODE (op0) == PARALLEL)
8384 /* Handle calls that pass values in multiple non-contiguous
8385 locations. The Irix 6 ABI has examples of this. */
8386 emit_group_store (memloc, op0,
8387 int_size_in_bytes (inner_type),
8388 TYPE_ALIGN (inner_type));
8389 else
8390 emit_move_insn (memloc, op0);
8391 op0 = memloc;
8394 if (GET_CODE (op0) != MEM)
8395 abort ();
8397 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8399 temp = XEXP (op0, 0);
8400 #ifdef POINTERS_EXTEND_UNSIGNED
8401 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8402 && mode == ptr_mode)
8403 temp = convert_memory_address (ptr_mode, temp);
8404 #endif
8405 return temp;
8408 op0 = force_operand (XEXP (op0, 0), target);
8411 if (flag_force_addr && GET_CODE (op0) != REG)
8412 op0 = force_reg (Pmode, op0);
8414 if (GET_CODE (op0) == REG
8415 && ! REG_USERVAR_P (op0))
8416 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8418 /* If we might have had a temp slot, add an equivalent address
8419 for it. */
8420 if (temp != 0)
8421 update_temp_slot_address (temp, op0);
8423 #ifdef POINTERS_EXTEND_UNSIGNED
8424 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8425 && mode == ptr_mode)
8426 op0 = convert_memory_address (ptr_mode, op0);
8427 #endif
8429 return op0;
8431 case ENTRY_VALUE_EXPR:
8432 abort ();
8434 /* COMPLEX type for Extended Pascal & Fortran */
8435 case COMPLEX_EXPR:
8437 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8438 rtx insns;
8440 /* Get the rtx code of the operands. */
8441 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8442 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8444 if (! target)
8445 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8447 start_sequence ();
8449 /* Move the real (op0) and imaginary (op1) parts to their location. */
8450 emit_move_insn (gen_realpart (mode, target), op0);
8451 emit_move_insn (gen_imagpart (mode, target), op1);
8453 insns = get_insns ();
8454 end_sequence ();
8456 /* Complex construction should appear as a single unit. */
8457 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8458 each with a separate pseudo as destination.
8459 It's not correct for flow to treat them as a unit. */
8460 if (GET_CODE (target) != CONCAT)
8461 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8462 else
8463 emit_insns (insns);
8465 return target;
8468 case REALPART_EXPR:
8469 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8470 return gen_realpart (mode, op0);
8472 case IMAGPART_EXPR:
8473 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8474 return gen_imagpart (mode, op0);
8476 case CONJ_EXPR:
8478 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8479 rtx imag_t;
8480 rtx insns;
8482 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8484 if (! target)
8485 target = gen_reg_rtx (mode);
8487 start_sequence ();
8489 /* Store the realpart and the negated imagpart to target. */
8490 emit_move_insn (gen_realpart (partmode, target),
8491 gen_realpart (partmode, op0));
8493 imag_t = gen_imagpart (partmode, target);
8494 temp = expand_unop (partmode,
8495 ! unsignedp && flag_trapv
8496 && (GET_MODE_CLASS(partmode) == MODE_INT)
8497 ? negv_optab : neg_optab,
8498 gen_imagpart (partmode, op0), imag_t, 0);
8499 if (temp != imag_t)
8500 emit_move_insn (imag_t, temp);
8502 insns = get_insns ();
8503 end_sequence ();
8505 /* Conjugate should appear as a single unit.
8506 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8507 each with a separate pseudo as destination.
8508 It's not correct for flow to treat them as a unit. */
8509 if (GET_CODE (target) != CONCAT)
8510 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8511 else
8512 emit_insns (insns);
8514 return target;
8517 case TRY_CATCH_EXPR:
8519 tree handler = TREE_OPERAND (exp, 1);
8521 expand_eh_region_start ();
8523 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8525 expand_eh_region_end (handler);
8527 return op0;
8530 case TRY_FINALLY_EXPR:
8532 tree try_block = TREE_OPERAND (exp, 0);
8533 tree finally_block = TREE_OPERAND (exp, 1);
8534 rtx finally_label = gen_label_rtx ();
8535 rtx done_label = gen_label_rtx ();
8536 rtx return_link = gen_reg_rtx (Pmode);
8537 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8538 (tree) finally_label, (tree) return_link);
8539 TREE_SIDE_EFFECTS (cleanup) = 1;
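/* The code emitted below has roughly this shape (written in GNU C terms):

        <try block>
        return_link = &&resume;  goto finally;  resume:
        goto done;
     finally:
        <finally block>
        goto *return_link;
     done:

   The jump to FINALLY_LABEL comes from expanding the
   GOTO_SUBROUTINE_EXPR cleanup when the binding contour is closed.  */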
8541 /* Start a new binding layer that will keep track of all cleanup
8542 actions to be performed. */
8543 expand_start_bindings (2);
8545 target_temp_slot_level = temp_slot_level;
8547 expand_decl_cleanup (NULL_TREE, cleanup);
8548 op0 = expand_expr (try_block, target, tmode, modifier);
8550 preserve_temp_slots (op0);
8551 expand_end_bindings (NULL_TREE, 0, 0);
8552 emit_jump (done_label);
8553 emit_label (finally_label);
8554 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8555 emit_indirect_jump (return_link);
8556 emit_label (done_label);
8557 return op0;
8560 case GOTO_SUBROUTINE_EXPR:
8562 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8563 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8564 rtx return_address = gen_label_rtx ();
8565 emit_move_insn (return_link,
8566 gen_rtx_LABEL_REF (Pmode, return_address));
8567 emit_jump (subr);
8568 emit_label (return_address);
8569 return const0_rtx;
8572 case POPDCC_EXPR:
8574 rtx dcc = get_dynamic_cleanup_chain ();
8575 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8576 return const0_rtx;
8579 case POPDHC_EXPR:
8581 rtx dhc = get_dynamic_handler_chain ();
8582 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8583 return const0_rtx;
8586 case VA_ARG_EXPR:
8587 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8589 default:
8590 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8593 /* Here to do an ordinary binary operator, generating an instruction
8594 from the optab already placed in `this_optab'. */
8595 binop:
8596 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8597 subtarget = 0;
8598 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8599 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8600 binop2:
8601 temp = expand_binop (mode, this_optab, op0, op1, target,
8602 unsignedp, OPTAB_LIB_WIDEN);
8603 if (temp == 0)
8604 abort ();
8605 return temp;
8608 /* Similar to expand_expr, except that we don't specify a target, target
8609 mode, or modifier and we return the alignment of the inner type. This is
8610 used in cases where it is not necessary to align the result to the
8611 alignment of its type as long as we know the alignment of the result, for
8612 example for comparisons of BLKmode values. */
8614 static rtx
8615 expand_expr_unaligned (exp, palign)
8616 register tree exp;
8617 unsigned int *palign;
8619 register rtx op0;
8620 tree type = TREE_TYPE (exp);
8621 register enum machine_mode mode = TYPE_MODE (type);
8623 /* Default the alignment we return to that of the type. */
8624 *palign = TYPE_ALIGN (type);
8626 /* The only case in which we do anything special is if the resulting mode
8627 is BLKmode. */
8628 if (mode != BLKmode)
8629 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8631 switch (TREE_CODE (exp))
8633 case CONVERT_EXPR:
8634 case NOP_EXPR:
8635 case NON_LVALUE_EXPR:
8636 /* Conversions between BLKmode values don't change the underlying
8637 alignment or value. */
8638 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8639 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8640 break;
8642 case ARRAY_REF:
8643 /* Much of the code for this case is copied directly from expand_expr.
8644 We need to duplicate it here because we will do something different
8645 in the fall-through case, so we need to handle the same exceptions
8646 it does. */
8648 tree array = TREE_OPERAND (exp, 0);
8649 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8650 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8651 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8652 HOST_WIDE_INT i;
8654 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8655 abort ();
8657 /* Optimize the special case of a zero lower bound.
8659 We convert the low_bound to sizetype to avoid some problems
8660 with constant folding. (E.g. suppose the lower bound is 1,
8661 and its mode is QI. Without the conversion, (ARRAY
8662 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8663 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8665 if (! integer_zerop (low_bound))
8666 index = size_diffop (index, convert (sizetype, low_bound));
8668 /* If this is a constant index into a constant array,
8669 just get the value from the array. Handle both the cases when
8670 we have an explicit constructor and when our operand is a variable
8671 that was declared const. */
8673 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8674 && host_integerp (index, 0)
8675 && 0 > compare_tree_int (index,
8676 list_length (CONSTRUCTOR_ELTS
8677 (TREE_OPERAND (exp, 0)))))
8679 tree elem;
8681 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8682 i = tree_low_cst (index, 0);
8683 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8686 if (elem)
8687 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8690 else if (optimize >= 1
8691 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8692 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8693 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8695 if (TREE_CODE (index) == INTEGER_CST)
8697 tree init = DECL_INITIAL (array);
8699 if (TREE_CODE (init) == CONSTRUCTOR)
8701 tree elem;
8703 for (elem = CONSTRUCTOR_ELTS (init);
8704 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8705 elem = TREE_CHAIN (elem))
8708 if (elem)
8709 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8710 palign);
8715 /* Fall through. */
8717 case COMPONENT_REF:
8718 case BIT_FIELD_REF:
8719 /* If the operand is a CONSTRUCTOR, we can just extract the
8720 appropriate field if it is present. Don't do this if we have
8721 already written the data since we want to refer to that copy
8722 and varasm.c assumes that's what we'll do. */
8723 if (TREE_CODE (exp) != ARRAY_REF
8724 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8725 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8727 tree elt;
8729 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8730 elt = TREE_CHAIN (elt))
8731 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8732 /* Note that unlike the case in expand_expr, we know this is
8733 BLKmode and hence not an integer. */
8734 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8738 enum machine_mode mode1;
8739 HOST_WIDE_INT bitsize, bitpos;
8740 tree offset;
8741 int volatilep = 0;
8742 unsigned int alignment;
8743 int unsignedp;
8744 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8745 &mode1, &unsignedp, &volatilep,
8746 &alignment);
8748 /* If we got back the original object, something is wrong. Perhaps
8749 we are evaluating an expression too early. In any event, don't
8750 infinitely recurse. */
8751 if (tem == exp)
8752 abort ();
8754 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8756 /* If this is a constant, put it into a register if it is a
8757 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8758 if (CONSTANT_P (op0))
8760 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8762 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8763 && offset == 0)
8764 op0 = force_reg (inner_mode, op0);
8765 else
8766 op0 = validize_mem (force_const_mem (inner_mode, op0));
8769 if (offset != 0)
8771 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8773 /* If this object is in a register, put it into memory.
8774 This case can't occur in C, but can in Ada if we have
8775 unchecked conversion of an expression from a scalar type to
8776 an array or record type. */
8777 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8778 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8780 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8782 mark_temp_addr_taken (memloc);
8783 emit_move_insn (memloc, op0);
8784 op0 = memloc;
8787 if (GET_CODE (op0) != MEM)
8788 abort ();
8790 if (GET_MODE (offset_rtx) != ptr_mode)
8792 #ifdef POINTERS_EXTEND_UNSIGNED
8793 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8794 #else
8795 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8796 #endif
8799 op0 = change_address (op0, VOIDmode,
8800 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8801 force_reg (ptr_mode,
8802 offset_rtx)));
8805 /* Don't forget about volatility even if this is a bitfield. */
8806 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8808 op0 = copy_rtx (op0);
8809 MEM_VOLATILE_P (op0) = 1;
8812 /* Check the access. */
8813 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8815 rtx to;
8816 int size;
8818 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8819 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8821 /* Check the access right of the pointer. */
8822 in_check_memory_usage = 1;
8823 if (size > BITS_PER_UNIT)
8824 emit_library_call (chkr_check_addr_libfunc,
8825 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
8826 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8827 TYPE_MODE (sizetype),
8828 GEN_INT (MEMORY_USE_RO),
8829 TYPE_MODE (integer_type_node));
8830 in_check_memory_usage = 0;
8833 /* In cases where an aligned union has an unaligned object
8834 as a field, we might be extracting a BLKmode value from
8835 an integer-mode (e.g., SImode) object. Handle this case
8836 by doing the extract into an object as wide as the field
8837 (which we know to be the width of a basic mode), then
8838 storing into memory, and changing the mode to BLKmode.
8839 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8840 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8841 if (mode1 == VOIDmode
8842 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8843 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
8844 && (TYPE_ALIGN (type) > alignment
8845 || bitpos % TYPE_ALIGN (type) != 0)))
8847 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8849 if (ext_mode == BLKmode)
8851 /* In this case, BITPOS must start at a byte boundary. */
8852 if (GET_CODE (op0) != MEM
8853 || bitpos % BITS_PER_UNIT != 0)
8854 abort ();
8856 op0 = change_address (op0, VOIDmode,
8857 plus_constant (XEXP (op0, 0),
8858 bitpos / BITS_PER_UNIT));
8860 else
8862 rtx new = assign_stack_temp (ext_mode,
8863 bitsize / BITS_PER_UNIT, 0);
8865 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8866 unsignedp, NULL_RTX, ext_mode,
8867 ext_mode, alignment,
8868 int_size_in_bytes (TREE_TYPE (tem)));
8870 /* If the result is a record type and BITSIZE is narrower than
8871 the mode of OP0, an integral mode, and this is a big endian
8872 machine, we must put the field into the high-order bits. */
8873 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8874 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8875 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8876 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8877 size_int (GET_MODE_BITSIZE
8878 (GET_MODE (op0))
8879 - bitsize),
8880 op0, 1);
8882 emit_move_insn (new, op0);
8883 op0 = copy_rtx (new);
8884 PUT_MODE (op0, BLKmode);
8887 else
8888 /* Get a reference to just this component. */
8889 op0 = change_address (op0, mode1,
8890 plus_constant (XEXP (op0, 0),
8891 (bitpos / BITS_PER_UNIT)));
8893 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8895 /* Adjust the alignment in case the bit position is not
8896 a multiple of the alignment of the inner object. */
8897 while (bitpos % alignment != 0)
8898 alignment >>= 1;
8900 if (GET_CODE (XEXP (op0, 0)) == REG)
8901 mark_reg_pointer (XEXP (op0, 0), alignment);
8903 MEM_IN_STRUCT_P (op0) = 1;
8904 MEM_VOLATILE_P (op0) |= volatilep;
8906 *palign = alignment;
8907 return op0;
8910 default:
8911 break;
8915 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8918 /* Return the tree node if ARG corresponds to a string constant, or zero
8919 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8920 in bytes within the string that ARG is accessing. The type of the
8921 offset will be `sizetype'. */
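/* For example, when ARG is the tree for "hello" + 3, i.e. an ADDR_EXPR
   of the STRING_CST plus the constant 3, this returns the STRING_CST
   and sets *PTR_OFFSET to 3.  */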
8923 tree
8924 string_constant (arg, ptr_offset)
8925 tree arg;
8926 tree *ptr_offset;
8928 STRIP_NOPS (arg);
8930 if (TREE_CODE (arg) == ADDR_EXPR
8931 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8933 *ptr_offset = size_zero_node;
8934 return TREE_OPERAND (arg, 0);
8936 else if (TREE_CODE (arg) == PLUS_EXPR)
8938 tree arg0 = TREE_OPERAND (arg, 0);
8939 tree arg1 = TREE_OPERAND (arg, 1);
8941 STRIP_NOPS (arg0);
8942 STRIP_NOPS (arg1);
8944 if (TREE_CODE (arg0) == ADDR_EXPR
8945 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8947 *ptr_offset = convert (sizetype, arg1);
8948 return TREE_OPERAND (arg0, 0);
8950 else if (TREE_CODE (arg1) == ADDR_EXPR
8951 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8953 *ptr_offset = convert (sizetype, arg0);
8954 return TREE_OPERAND (arg1, 0);
8958 return 0;
8961 /* Expand code for a post- or pre- increment or decrement
8962 and return the RTX for the result.
8963 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8965 static rtx
8966 expand_increment (exp, post, ignore)
8967 register tree exp;
8968 int post, ignore;
8970 register rtx op0, op1;
8971 register rtx temp, value;
8972 register tree incremented = TREE_OPERAND (exp, 0);
8973 optab this_optab = add_optab;
8974 int icode;
8975 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8976 int op0_is_copy = 0;
8977 int single_insn = 0;
8978 /* 1 means we can't store into OP0 directly,
8979 because it is a subreg narrower than a word,
8980 and we don't dare clobber the rest of the word. */
8981 int bad_subreg = 0;
8983 /* Stabilize any component ref that might need to be
8984 evaluated more than once below. */
8985 if (!post
8986 || TREE_CODE (incremented) == BIT_FIELD_REF
8987 || (TREE_CODE (incremented) == COMPONENT_REF
8988 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8989 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8990 incremented = stabilize_reference (incremented);
8991 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8992 ones into save exprs so that they don't accidentally get evaluated
8993 more than once by the code below. */
8994 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8995 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8996 incremented = save_expr (incremented);
8998 /* Compute the operands as RTX.
8999 Note whether OP0 is the actual lvalue or a copy of it:
9000 I believe it is a copy iff it is a register or subreg
9001 and insns were generated in computing it. */
9003 temp = get_last_insn ();
9004 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9006 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9007 in place but instead must do sign- or zero-extension during assignment,
9008 so we copy it into a new register and let the code below use it as
9009 a copy.
9011 Note that we can safely modify this SUBREG since it is known not to be
9012 shared (it was made by the expand_expr call above). */
9014 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9016 if (post)
9017 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9018 else
9019 bad_subreg = 1;
9021 else if (GET_CODE (op0) == SUBREG
9022 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9024 /* We cannot increment this SUBREG in place. If we are
9025 post-incrementing, get a copy of the old value. Otherwise,
9026 just mark that we cannot increment in place. */
9027 if (post)
9028 op0 = copy_to_reg (op0);
9029 else
9030 bad_subreg = 1;
9033 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9034 && temp != get_last_insn ());
9035 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9036 EXPAND_MEMORY_USE_BAD);
9038 /* Decide whether incrementing or decrementing. */
9039 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9040 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9041 this_optab = sub_optab;
9043 /* Convert decrement by a constant into a negative increment. */
9044 if (this_optab == sub_optab
9045 && GET_CODE (op1) == CONST_INT)
9047 op1 = GEN_INT (-INTVAL (op1));
9048 this_optab = add_optab;
9051 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9052 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9054 /* For a preincrement, see if we can do this with a single instruction. */
9055 if (!post)
9057 icode = (int) this_optab->handlers[(int) mode].insn_code;
9058 if (icode != (int) CODE_FOR_nothing
9059 /* Make sure that OP0 is valid for operands 0 and 1
9060 of the insn we want to queue. */
9061 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9062 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9063 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9064 single_insn = 1;
9067 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9068 then we cannot just increment OP0. We must therefore contrive to
9069 increment the original value. Then, for postincrement, we can return
9070 OP0 since it is a copy of the old value. For preincrement, expand here
9071 unless we can do it with a single insn.
9073 Likewise if storing directly into OP0 would clobber high bits
9074 we need to preserve (bad_subreg). */
9075 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9077 /* This is the easiest way to increment the value wherever it is.
9078 Problems with multiple evaluation of INCREMENTED are prevented
9079 because either (1) it is a component_ref or preincrement,
9080 in which case it was stabilized above, or (2) it is an array_ref
9081 with constant index in an array in a register, which is
9082 safe to reevaluate. */
9083 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9084 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9085 ? MINUS_EXPR : PLUS_EXPR),
9086 TREE_TYPE (exp),
9087 incremented,
9088 TREE_OPERAND (exp, 1));
9090 while (TREE_CODE (incremented) == NOP_EXPR
9091 || TREE_CODE (incremented) == CONVERT_EXPR)
9093 newexp = convert (TREE_TYPE (incremented), newexp);
9094 incremented = TREE_OPERAND (incremented, 0);
9097 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9098 return post ? op0 : temp;
9101 if (post)
9103 /* We have a true reference to the value in OP0.
9104 If there is an insn to add or subtract in this mode, queue it.
9105 Queueing the increment insn avoids the register shuffling
9106 that often results if we must increment now and first save
9107 the old value for subsequent use. */
9109 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9110 op0 = stabilize (op0);
9111 #endif
9113 icode = (int) this_optab->handlers[(int) mode].insn_code;
9114 if (icode != (int) CODE_FOR_nothing
9115 /* Make sure that OP0 is valid for operands 0 and 1
9116 of the insn we want to queue. */
9117 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9118 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9120 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9121 op1 = force_reg (mode, op1);
9123 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9125 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9127 rtx addr = (general_operand (XEXP (op0, 0), mode)
9128 ? force_reg (Pmode, XEXP (op0, 0))
9129 : copy_to_reg (XEXP (op0, 0)));
9130 rtx temp, result;
9132 op0 = change_address (op0, VOIDmode, addr);
9133 temp = force_reg (GET_MODE (op0), op0);
9134 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9135 op1 = force_reg (mode, op1);
9137 /* The increment queue is LIFO, thus we have to `queue'
9138 the instructions in reverse order. */
9139 enqueue_insn (op0, gen_move_insn (op0, temp));
9140 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9141 return result;
9145 /* Preincrement, or we can't increment with one simple insn. */
9146 if (post)
9147 /* Save a copy of the value before inc or dec, to return it later. */
9148 temp = value = copy_to_reg (op0);
9149 else
9150 /* Arrange to return the incremented value. */
9151 /* Copy the rtx because expand_binop will protect from the queue,
9152 and the results of that would be invalid for us to return
9153 if our caller does emit_queue before using our result. */
9154 temp = copy_rtx (value = op0);
9156 /* Increment however we can. */
9157 op1 = expand_binop (mode, this_optab, value, op1,
9158 current_function_check_memory_usage ? NULL_RTX : op0,
9159 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9160 /* Make sure the value is stored into OP0. */
9161 if (op1 != op0)
9162 emit_move_insn (op0, op1);
9164 return temp;
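/* Rough sketch of the two main paths above.  For a postincrement such as
   y = x++ where X sits in a register and the target has a plain add pattern,
   OP0 is the register itself, so the add is merely queued: something like
   (set (reg x) (plus (reg x) (const_int 1))) is recorded by enqueue_insn to
   be emitted at the next emit_queue, and the value yielded for the
   expression is the register's contents from before that queued insn.  When
   OP0 turns out to be only a copy of the lvalue (op0_is_copy), or a narrow
   SUBREG we dare not clobber (bad_subreg), we instead rebuild the operation
   as a PLUS_EXPR or MINUS_EXPR tree and let expand_assignment store the
   result back.  */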
9167 /* At the start of a function, record that we have no previously-pushed
9168 arguments waiting to be popped. */
9170 void
9171 init_pending_stack_adjust ()
9173 pending_stack_adjust = 0;
9176 /* When exiting from function, if safe, clear out any pending stack adjust
9177 so the adjustment won't get done.
9179 Note, if the current function calls alloca, then it must have a
9180 frame pointer regardless of the value of flag_omit_frame_pointer. */
9182 void
9183 clear_pending_stack_adjust ()
9185 #ifdef EXIT_IGNORE_STACK
9186 if (optimize > 0
9187 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9188 && EXIT_IGNORE_STACK
9189 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9190 && ! flag_inline_functions)
9192 stack_pointer_delta -= pending_stack_adjust,
9193 pending_stack_adjust = 0;
9195 #endif
9198 /* Pop any previously-pushed arguments that have not been popped yet. */
9200 void
9201 do_pending_stack_adjust ()
9203 if (inhibit_defer_pop == 0)
9205 if (pending_stack_adjust != 0)
9206 adjust_stack (GEN_INT (pending_stack_adjust));
9207 pending_stack_adjust = 0;
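/* The pending_stack_adjust mechanism lets successive argument pops be
   merged.  On a target that removes pushed arguments with an explicit stack
   adjustment after each call, two adjacent calls such as f (1); g (2); each
   just add their argument size to pending_stack_adjust; one adjust_stack of
   the combined amount is emitted the next time do_pending_stack_adjust runs,
   typically before a jump or label.  clear_pending_stack_adjust drops the
   adjustment altogether at function exit when EXIT_IGNORE_STACK says the
   epilogue restores the stack pointer anyway.  */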
9211 /* Expand conditional expressions. */
9213 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9214 LABEL is an rtx of code CODE_LABEL, in this function and all the
9215 functions here. */
9217 void
9218 jumpifnot (exp, label)
9219 tree exp;
9220 rtx label;
9222 do_jump (exp, label, NULL_RTX);
9225 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9227 void
9228 jumpif (exp, label)
9229 tree exp;
9230 rtx label;
9232 do_jump (exp, NULL_RTX, label);
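/* Both wrappers just call do_jump with one label left as NULL_RTX, which
   means "fall through in that case".  A statement such as if (cond) body;
   is therefore typically expanded as jumpifnot (cond, else_label), then the
   body, then else_label.  */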
9235 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9236 the result is zero, or IF_TRUE_LABEL if the result is one.
9237 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9238 meaning fall through in that case.
9240 do_jump always does any pending stack adjust except when it does not
9241 actually perform a jump. An example where there is no jump
9242 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9244 This function is responsible for optimizing cases such as
9245 &&, || and comparison operators in EXP. */
9247 void
9248 do_jump (exp, if_false_label, if_true_label)
9249 tree exp;
9250 rtx if_false_label, if_true_label;
9252 register enum tree_code code = TREE_CODE (exp);
9253 /* Some cases need to create a label to jump to
9254 in order to properly fall through.
9255 These cases set DROP_THROUGH_LABEL nonzero. */
9256 rtx drop_through_label = 0;
9257 rtx temp;
9258 int i;
9259 tree type;
9260 enum machine_mode mode;
9262 #ifdef MAX_INTEGER_COMPUTATION_MODE
9263 check_max_integer_computation_mode (exp);
9264 #endif
9266 emit_queue ();
9268 switch (code)
9270 case ERROR_MARK:
9271 break;
9273 case INTEGER_CST:
9274 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9275 if (temp)
9276 emit_jump (temp);
9277 break;
9279 #if 0
9280 /* This is not true with #pragma weak */
9281 case ADDR_EXPR:
9282 /* The address of something can never be zero. */
9283 if (if_true_label)
9284 emit_jump (if_true_label);
9285 break;
9286 #endif
9288 case NOP_EXPR:
9289 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9290 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9291 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9292 goto normal;
9293 case CONVERT_EXPR:
9294 /* If we are narrowing the operand, we have to do the compare in the
9295 narrower mode. */
9296 if ((TYPE_PRECISION (TREE_TYPE (exp))
9297 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9298 goto normal;
9299 case NON_LVALUE_EXPR:
9300 case REFERENCE_EXPR:
9301 case ABS_EXPR:
9302 case NEGATE_EXPR:
9303 case LROTATE_EXPR:
9304 case RROTATE_EXPR:
9305 /* These cannot change zero->non-zero or vice versa. */
9306 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9307 break;
9309 case WITH_RECORD_EXPR:
9310 /* Put the object on the placeholder list, recurse through our first
9311 operand, and pop the list. */
9312 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9313 placeholder_list);
9314 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9315 placeholder_list = TREE_CHAIN (placeholder_list);
9316 break;
9318 #if 0
9319 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9320 a test and can be longer if the test is eliminated. */
9321 case PLUS_EXPR:
9322 /* Reduce to minus. */
9323 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9324 TREE_OPERAND (exp, 0),
9325 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9326 TREE_OPERAND (exp, 1))));
9327 /* Process as MINUS. */
9328 #endif
9330 case MINUS_EXPR:
9331 /* Non-zero iff operands of minus differ. */
9332 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9333 TREE_OPERAND (exp, 0),
9334 TREE_OPERAND (exp, 1)),
9335 NE, NE, if_false_label, if_true_label);
9336 break;
9338 case BIT_AND_EXPR:
9339 /* If we are AND'ing with a small constant, do this comparison in the
9340 smallest type that fits. If the machine doesn't have comparisons
9341 that small, it will be converted back to the wider comparison.
9342 This helps if we are testing the sign bit of a narrower object.
9343 combine can't do this for us because it can't know whether a
9344 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9346 if (! SLOW_BYTE_ACCESS
9347 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9348 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9349 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9350 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9351 && (type = type_for_mode (mode, 1)) != 0
9352 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9353 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9354 != CODE_FOR_nothing))
9356 do_jump (convert (type, exp), if_false_label, if_true_label);
9357 break;
9359 goto normal;
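/* Concrete case for the transformation above: testing (x & 0x80) != 0
   where X is, say, a 32-bit int only needs bits 0-7, so when the target has
   a QImode compare the whole expression is converted to an 8-bit unsigned
   type and the jump is redone on that, which often ends up as a single byte
   test of the sign bit.  */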
9361 case TRUTH_NOT_EXPR:
9362 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9363 break;
9365 case TRUTH_ANDIF_EXPR:
9366 if (if_false_label == 0)
9367 if_false_label = drop_through_label = gen_label_rtx ();
9368 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9369 start_cleanup_deferral ();
9370 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9371 end_cleanup_deferral ();
9372 break;
9374 case TRUTH_ORIF_EXPR:
9375 if (if_true_label == 0)
9376 if_true_label = drop_through_label = gen_label_rtx ();
9377 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9378 start_cleanup_deferral ();
9379 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9380 end_cleanup_deferral ();
9381 break;
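/* These two cases implement short-circuit evaluation directly with jumps:
   for a && b the first do_jump branches to the false label (or to a freshly
   made drop-through label) as soon as A is zero, and only then is B
   evaluated and tested; a || b is the mirror image using the true label.
   The cleanup-deferral calls bracket the second operand because it is
   evaluated only conditionally.  */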
9383 case COMPOUND_EXPR:
9384 push_temp_slots ();
9385 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9386 preserve_temp_slots (NULL_RTX);
9387 free_temp_slots ();
9388 pop_temp_slots ();
9389 emit_queue ();
9390 do_pending_stack_adjust ();
9391 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9392 break;
9394 case COMPONENT_REF:
9395 case BIT_FIELD_REF:
9396 case ARRAY_REF:
9398 HOST_WIDE_INT bitsize, bitpos;
9399 int unsignedp;
9400 enum machine_mode mode;
9401 tree type;
9402 tree offset;
9403 int volatilep = 0;
9404 unsigned int alignment;
9406 /* Get description of this reference. We don't actually care
9407 about the underlying object here. */
9408 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9409 &unsignedp, &volatilep, &alignment);
9411 type = type_for_size (bitsize, unsignedp);
9412 if (! SLOW_BYTE_ACCESS
9413 && type != 0 && bitsize >= 0
9414 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9415 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9416 != CODE_FOR_nothing))
9418 do_jump (convert (type, exp), if_false_label, if_true_label);
9419 break;
9421 goto normal;
9424 case COND_EXPR:
9425 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9426 if (integer_onep (TREE_OPERAND (exp, 1))
9427 && integer_zerop (TREE_OPERAND (exp, 2)))
9428 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9430 else if (integer_zerop (TREE_OPERAND (exp, 1))
9431 && integer_onep (TREE_OPERAND (exp, 2)))
9432 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9434 else
9436 register rtx label1 = gen_label_rtx ();
9437 drop_through_label = gen_label_rtx ();
9439 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9441 start_cleanup_deferral ();
9442 /* Now the THEN-expression. */
9443 do_jump (TREE_OPERAND (exp, 1),
9444 if_false_label ? if_false_label : drop_through_label,
9445 if_true_label ? if_true_label : drop_through_label);
9446 /* In case the do_jump just above never jumps. */
9447 do_pending_stack_adjust ();
9448 emit_label (label1);
9450 /* Now the ELSE-expression. */
9451 do_jump (TREE_OPERAND (exp, 2),
9452 if_false_label ? if_false_label : drop_through_label,
9453 if_true_label ? if_true_label : drop_through_label);
9454 end_cleanup_deferral ();
9456 break;
9458 case EQ_EXPR:
9460 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9462 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9463 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9465 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9466 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9467 do_jump
9468 (fold
9469 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9470 fold (build (EQ_EXPR, TREE_TYPE (exp),
9471 fold (build1 (REALPART_EXPR,
9472 TREE_TYPE (inner_type),
9473 exp0)),
9474 fold (build1 (REALPART_EXPR,
9475 TREE_TYPE (inner_type),
9476 exp1)))),
9477 fold (build (EQ_EXPR, TREE_TYPE (exp),
9478 fold (build1 (IMAGPART_EXPR,
9479 TREE_TYPE (inner_type),
9480 exp0)),
9481 fold (build1 (IMAGPART_EXPR,
9482 TREE_TYPE (inner_type),
9483 exp1)))))),
9484 if_false_label, if_true_label);
9487 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9488 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9490 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9491 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9492 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9493 else
9494 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9495 break;
9498 case NE_EXPR:
9500 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9502 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9503 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9505 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9506 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9507 do_jump
9508 (fold
9509 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9510 fold (build (NE_EXPR, TREE_TYPE (exp),
9511 fold (build1 (REALPART_EXPR,
9512 TREE_TYPE (inner_type),
9513 exp0)),
9514 fold (build1 (REALPART_EXPR,
9515 TREE_TYPE (inner_type),
9516 exp1)))),
9517 fold (build (NE_EXPR, TREE_TYPE (exp),
9518 fold (build1 (IMAGPART_EXPR,
9519 TREE_TYPE (inner_type),
9520 exp0)),
9521 fold (build1 (IMAGPART_EXPR,
9522 TREE_TYPE (inner_type),
9523 exp1)))))),
9524 if_false_label, if_true_label);
9527 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9528 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9530 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9531 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9532 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9533 else
9534 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9535 break;
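/* There is no single comparison insn for complex values, so the EQ and NE
   cases above rewrite the test at the tree level and recurse: x == y becomes
   REALPART (x) == REALPART (y) && IMAGPART (x) == IMAGPART (y), and x != y
   becomes the corresponding || of the two part-wise NE tests.  The
   save_exprs keep X and Y from being expanded twice.  */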
9538 case LT_EXPR:
9539 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9540 if (GET_MODE_CLASS (mode) == MODE_INT
9541 && ! can_compare_p (LT, mode, ccp_jump))
9542 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9543 else
9544 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9545 break;
9547 case LE_EXPR:
9548 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9549 if (GET_MODE_CLASS (mode) == MODE_INT
9550 && ! can_compare_p (LE, mode, ccp_jump))
9551 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9552 else
9553 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9554 break;
9556 case GT_EXPR:
9557 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9558 if (GET_MODE_CLASS (mode) == MODE_INT
9559 && ! can_compare_p (GT, mode, ccp_jump))
9560 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9561 else
9562 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9563 break;
9565 case GE_EXPR:
9566 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9567 if (GET_MODE_CLASS (mode) == MODE_INT
9568 && ! can_compare_p (GE, mode, ccp_jump))
9569 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9570 else
9571 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9572 break;
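/* The four ordering comparisons all follow one pattern: when the operands
   are integers wider than any available compare insn (can_compare_p fails),
   the test is handed to do_jump_by_parts_greater, which only knows how to
   test "greater", so LT is done by swapping the operands, and LE and GE by
   additionally exchanging the two labels (a <= b is the reverse of a > b).
   Otherwise do_compare_and_jump emits the ordinary signed or unsigned
   compare-and-branch.  */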
9574 case UNORDERED_EXPR:
9575 case ORDERED_EXPR:
9577 enum rtx_code cmp, rcmp;
9578 int do_rev;
9580 if (code == UNORDERED_EXPR)
9581 cmp = UNORDERED, rcmp = ORDERED;
9582 else
9583 cmp = ORDERED, rcmp = UNORDERED;
9584 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9586 do_rev = 0;
9587 if (! can_compare_p (cmp, mode, ccp_jump)
9588 && (can_compare_p (rcmp, mode, ccp_jump)
9589 /* If the target doesn't provide either UNORDERED or ORDERED
9590 comparisons, canonicalize on UNORDERED for the library. */
9591 || rcmp == UNORDERED))
9592 do_rev = 1;
9594 if (! do_rev)
9595 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9596 else
9597 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9599 break;
9602 enum rtx_code rcode1;
9603 enum tree_code tcode2;
9605 case UNLT_EXPR:
9606 rcode1 = UNLT;
9607 tcode2 = LT_EXPR;
9608 goto unordered_bcc;
9609 case UNLE_EXPR:
9610 rcode1 = UNLE;
9611 tcode2 = LE_EXPR;
9612 goto unordered_bcc;
9613 case UNGT_EXPR:
9614 rcode1 = UNGT;
9615 tcode2 = GT_EXPR;
9616 goto unordered_bcc;
9617 case UNGE_EXPR:
9618 rcode1 = UNGE;
9619 tcode2 = GE_EXPR;
9620 goto unordered_bcc;
9621 case UNEQ_EXPR:
9622 rcode1 = UNEQ;
9623 tcode2 = EQ_EXPR;
9624 goto unordered_bcc;
9626 unordered_bcc:
9627 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9628 if (can_compare_p (rcode1, mode, ccp_jump))
9629 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9630 if_true_label);
9631 else
9633 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9634 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9635 tree cmp0, cmp1;
9637 /* If the target doesn't support combined unordered
9638 compares, decompose into UNORDERED + comparison. */
9639 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9640 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9641 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9642 do_jump (exp, if_false_label, if_true_label);
9645 break;
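/* Example of the fallback just above: if the target cannot branch on UNLT
   directly, a test UNLT (a, b) is rebuilt at the tree level as
   UNORDERED (a, b) || a < b and handed back to do_jump, which then expands
   it through the short-circuit TRUTH_ORIF_EXPR machinery.  */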
9647 default:
9648 normal:
9649 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9650 #if 0
9651 /* This is not needed any more and causes poor code since it causes
9652 comparisons and tests from non-SI objects to have different code
9653 sequences. */
9654 /* Copy to register to avoid generating bad insns by cse
9655 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9656 if (!cse_not_expected && GET_CODE (temp) == MEM)
9657 temp = copy_to_reg (temp);
9658 #endif
9659 do_pending_stack_adjust ();
9660 /* Do any postincrements in the expression that was tested. */
9661 emit_queue ();
9663 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
9665 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9666 if (target)
9667 emit_jump (target);
9669 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9670 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9671 /* Note swapping the labels gives us not-equal. */
9672 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9673 else if (GET_MODE (temp) != VOIDmode)
9674 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9675 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9676 GET_MODE (temp), NULL_RTX, 0,
9677 if_false_label, if_true_label);
9678 else
9679 abort ();
9682 if (drop_through_label)
9684 /* If do_jump produces code that might be jumped around,
9685 do any stack adjusts from that code, before the place
9686 where control merges in. */
9687 do_pending_stack_adjust ();
9688 emit_label (drop_through_label);
9692 /* Given a comparison expression EXP for values too wide to be compared
9693 with one insn, test the comparison and jump to the appropriate label.
9694 The code of EXP is ignored; we always test GT if SWAP is 0,
9695 and LT if SWAP is 1. */
9697 static void
9698 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9699 tree exp;
9700 int swap;
9701 rtx if_false_label, if_true_label;
9703 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9704 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9705 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9706 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9708 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9711 /* Compare OP0 with OP1, word at a time, in mode MODE.
9712 UNSIGNEDP says to do unsigned comparison.
9713 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9715 void
9716 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9717 enum machine_mode mode;
9718 int unsignedp;
9719 rtx op0, op1;
9720 rtx if_false_label, if_true_label;
9722 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9723 rtx drop_through_label = 0;
9724 int i;
9726 if (! if_true_label || ! if_false_label)
9727 drop_through_label = gen_label_rtx ();
9728 if (! if_true_label)
9729 if_true_label = drop_through_label;
9730 if (! if_false_label)
9731 if_false_label = drop_through_label;
9733 /* Compare a word at a time, high order first. */
9734 for (i = 0; i < nwords; i++)
9736 rtx op0_word, op1_word;
9738 if (WORDS_BIG_ENDIAN)
9740 op0_word = operand_subword_force (op0, i, mode);
9741 op1_word = operand_subword_force (op1, i, mode);
9743 else
9745 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9746 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9749 /* All but the high-order word must be compared as unsigned. */
9750 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9751 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9752 NULL_RTX, if_true_label);
9754 /* Consider lower words only if these are equal. */
9755 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9756 NULL_RTX, 0, NULL_RTX, if_false_label);
9759 if (if_false_label)
9760 emit_jump (if_false_label);
9761 if (drop_through_label)
9762 emit_label (drop_through_label);
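/* Worked example, assuming a 32-bit target comparing two signed DImode
   values, so nwords is 2.  The first iteration compares the high-order
   words: a signed GT branch goes to if_true_label, then an NE branch goes to
   if_false_label, so control falls through only when the high words are
   equal.  The second iteration compares the low-order words, and these are
   always compared unsigned since they carry no sign of their own.  */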
9765 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9766 with one insn, test the comparison and jump to the appropriate label. */
9768 static void
9769 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9770 tree exp;
9771 rtx if_false_label, if_true_label;
9773 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9774 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9775 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9776 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9777 int i;
9778 rtx drop_through_label = 0;
9780 if (! if_false_label)
9781 drop_through_label = if_false_label = gen_label_rtx ();
9783 for (i = 0; i < nwords; i++)
9784 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9785 operand_subword_force (op1, i, mode),
9786 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9787 word_mode, NULL_RTX, 0, if_false_label,
9788 NULL_RTX);
9790 if (if_true_label)
9791 emit_jump (if_true_label);
9792 if (drop_through_label)
9793 emit_label (drop_through_label);
9796 /* Jump according to whether OP0 is 0.
9797 We assume that OP0 has an integer mode that is too wide
9798 for the available compare insns. */
9800 void
9801 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9802 rtx op0;
9803 rtx if_false_label, if_true_label;
9805 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9806 rtx part;
9807 int i;
9808 rtx drop_through_label = 0;
9810 /* The fastest way of doing this comparison on almost any machine is to
9811 "or" all the words and compare the result. If all have to be loaded
9812 from memory and this is a very wide item, it's possible this may
9813 be slower, but that's highly unlikely. */
9815 part = gen_reg_rtx (word_mode);
9816 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9817 for (i = 1; i < nwords && part != 0; i++)
9818 part = expand_binop (word_mode, ior_optab, part,
9819 operand_subword_force (op0, i, GET_MODE (op0)),
9820 part, 1, OPTAB_WIDEN);
9822 if (part != 0)
9824 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9825 NULL_RTX, 0, if_false_label, if_true_label);
9827 return;
9830 /* If we couldn't do the "or" simply, do this with a series of compares. */
9831 if (! if_false_label)
9832 drop_through_label = if_false_label = gen_label_rtx ();
9834 for (i = 0; i < nwords; i++)
9835 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9836 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9837 if_false_label, NULL_RTX);
9839 if (if_true_label)
9840 emit_jump (if_true_label);
9842 if (drop_through_label)
9843 emit_label (drop_through_label);
9846 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9847 (including code to compute the values to be compared)
9848 and set (CC0) according to the result.
9849 The decision as to signed or unsigned comparison must be made by the caller.
9851 We force a stack adjustment unless there are currently
9852 things pushed on the stack that aren't yet used.
9854 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9855 compared.
9857 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9858 size of MODE should be used. */
9860 rtx
9861 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9862 register rtx op0, op1;
9863 enum rtx_code code;
9864 int unsignedp;
9865 enum machine_mode mode;
9866 rtx size;
9867 unsigned int align;
9869 rtx tem;
9871 /* If one operand is constant, make it the second one. Only do this
9872 if the other operand is not constant as well. */
9874 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9875 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9877 tem = op0;
9878 op0 = op1;
9879 op1 = tem;
9880 code = swap_condition (code);
9883 if (flag_force_mem)
9885 op0 = force_not_mem (op0);
9886 op1 = force_not_mem (op1);
9889 do_pending_stack_adjust ();
9891 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9892 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9893 return tem;
9895 #if 0
9896 /* There's no need to do this now that combine.c can eliminate lots of
9897 sign extensions. This can be less efficient in certain cases on other
9898 machines. */
9900 /* If this is a signed equality comparison, we can do it as an
9901 unsigned comparison since zero-extension is cheaper than sign
9902 extension and comparisons with zero are done as unsigned. This is
9903 the case even on machines that can do fast sign extension, since
9904 zero-extension is easier to combine with other operations than
9905 sign-extension is. If we are comparing against a constant, we must
9906 convert it to what it would look like unsigned. */
9907 if ((code == EQ || code == NE) && ! unsignedp
9908 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9910 if (GET_CODE (op1) == CONST_INT
9911 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9912 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9913 unsignedp = 1;
9915 #endif
9917 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9919 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
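/* Note for callers: the usual return value is a (CODE cc0 const0) rtx meant
   to feed a following branch or store-flag pattern, but when both operands
   are CONST_INTs the comparison folds via simplify_relational_operation and
   a bare const0_rtx or const_true_rtx comes back instead, so callers (see
   do_store_flag below) must be prepared for a CONST_INT result.  */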
9922 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9923 The decision as to signed or unsigned comparison must be made by the caller.
9925 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9926 compared.
9928 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9929 size of MODE should be used. */
9931 void
9932 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9933 if_false_label, if_true_label)
9934 register rtx op0, op1;
9935 enum rtx_code code;
9936 int unsignedp;
9937 enum machine_mode mode;
9938 rtx size;
9939 unsigned int align;
9940 rtx if_false_label, if_true_label;
9942 rtx tem;
9943 int dummy_true_label = 0;
9945 /* Reverse the comparison if that is safe and we want to jump if it is
9946 false. */
9947 if (! if_true_label && ! FLOAT_MODE_P (mode))
9949 if_true_label = if_false_label;
9950 if_false_label = 0;
9951 code = reverse_condition (code);
9954 /* If one operand is constant, make it the second one. Only do this
9955 if the other operand is not constant as well. */
9957 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9958 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9960 tem = op0;
9961 op0 = op1;
9962 op1 = tem;
9963 code = swap_condition (code);
9966 if (flag_force_mem)
9968 op0 = force_not_mem (op0);
9969 op1 = force_not_mem (op1);
9972 do_pending_stack_adjust ();
9974 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9975 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9977 if (tem == const_true_rtx)
9979 if (if_true_label)
9980 emit_jump (if_true_label);
9982 else
9984 if (if_false_label)
9985 emit_jump (if_false_label);
9987 return;
9990 #if 0
9991 /* There's no need to do this now that combine.c can eliminate lots of
9992 sign extensions. This can be less efficient in certain cases on other
9993 machines. */
9995 /* If this is a signed equality comparison, we can do it as an
9996 unsigned comparison since zero-extension is cheaper than sign
9997 extension and comparisons with zero are done as unsigned. This is
9998 the case even on machines that can do fast sign extension, since
9999 zero-extension is easier to combine with other operations than
10000 sign-extension is. If we are comparing against a constant, we must
10001 convert it to what it would look like unsigned. */
10002 if ((code == EQ || code == NE) && ! unsignedp
10003 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10005 if (GET_CODE (op1) == CONST_INT
10006 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10007 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10008 unsignedp = 1;
10010 #endif
10012 if (! if_true_label)
10014 dummy_true_label = 1;
10015 if_true_label = gen_label_rtx ();
10018 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
10019 if_true_label);
10021 if (if_false_label)
10022 emit_jump (if_false_label);
10023 if (dummy_true_label)
10024 emit_label (if_true_label);
10027 /* Generate code for a comparison expression EXP (including code to compute
10028 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10029 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10030 generated code will drop through.
10031 SIGNED_CODE should be the rtx operation for this comparison for
10032 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10034 We force a stack adjustment unless there are currently
10035 things pushed on the stack that aren't yet used. */
10037 static void
10038 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10039 if_true_label)
10040 register tree exp;
10041 enum rtx_code signed_code, unsigned_code;
10042 rtx if_false_label, if_true_label;
10044 unsigned int align0, align1;
10045 register rtx op0, op1;
10046 register tree type;
10047 register enum machine_mode mode;
10048 int unsignedp;
10049 enum rtx_code code;
10051 /* Don't crash if the comparison was erroneous. */
10052 op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
10053 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10054 return;
10056 op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
10057 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10058 mode = TYPE_MODE (type);
10059 unsignedp = TREE_UNSIGNED (type);
10060 code = unsignedp ? unsigned_code : signed_code;
10062 #ifdef HAVE_canonicalize_funcptr_for_compare
10063 /* If function pointers need to be "canonicalized" before they can
10064 be reliably compared, then canonicalize them. */
10065 if (HAVE_canonicalize_funcptr_for_compare
10066 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10067 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10068 == FUNCTION_TYPE))
10070 rtx new_op0 = gen_reg_rtx (mode);
10072 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10073 op0 = new_op0;
10076 if (HAVE_canonicalize_funcptr_for_compare
10077 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10078 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10079 == FUNCTION_TYPE))
10081 rtx new_op1 = gen_reg_rtx (mode);
10083 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10084 op1 = new_op1;
10086 #endif
10088 /* Do any postincrements in the expression that was tested. */
10089 emit_queue ();
10091 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10092 ((mode == BLKmode)
10093 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10094 MIN (align0, align1),
10095 if_false_label, if_true_label);
10098 /* Generate code to calculate EXP using a store-flag instruction
10099 and return an rtx for the result. EXP is either a comparison
10100 or a TRUTH_NOT_EXPR whose operand is a comparison.
10102 If TARGET is nonzero, store the result there if convenient.
10104 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10105 cheap.
10107 Return zero if there is no suitable set-flag instruction
10108 available on this machine.
10110 Once expand_expr has been called on the arguments of the comparison,
10111 we are committed to doing the store flag, since it is not safe to
10112 re-evaluate the expression. We emit the store-flag insn by calling
10113 emit_store_flag, but only expand the arguments if we have a reason
10114 to believe that emit_store_flag will be successful. If we think that
10115 it will, but it isn't, we have to simulate the store-flag with a
10116 set/jump/set sequence. */
10118 static rtx
10119 do_store_flag (exp, target, mode, only_cheap)
10120 tree exp;
10121 rtx target;
10122 enum machine_mode mode;
10123 int only_cheap;
10125 enum rtx_code code;
10126 tree arg0, arg1, type;
10127 tree tem;
10128 enum machine_mode operand_mode;
10129 int invert = 0;
10130 int unsignedp;
10131 rtx op0, op1;
10132 enum insn_code icode;
10133 rtx subtarget = target;
10134 rtx result, label;
10136 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10137 result at the end. We can't simply invert the test since it would
10138 have already been inverted if it were valid. This case occurs for
10139 some floating-point comparisons. */
10141 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10142 invert = 1, exp = TREE_OPERAND (exp, 0);
10144 arg0 = TREE_OPERAND (exp, 0);
10145 arg1 = TREE_OPERAND (exp, 1);
10147 /* Don't crash if the comparison was erroneous. */
10148 if (arg0 == error_mark_node || arg1 == error_mark_node)
10149 return const0_rtx;
10151 type = TREE_TYPE (arg0);
10152 operand_mode = TYPE_MODE (type);
10153 unsignedp = TREE_UNSIGNED (type);
10155 /* We won't bother with BLKmode store-flag operations because it would mean
10156 passing a lot of information to emit_store_flag. */
10157 if (operand_mode == BLKmode)
10158 return 0;
10160 /* We won't bother with store-flag operations involving function pointers
10161 when function pointers must be canonicalized before comparisons. */
10162 #ifdef HAVE_canonicalize_funcptr_for_compare
10163 if (HAVE_canonicalize_funcptr_for_compare
10164 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10165 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10166 == FUNCTION_TYPE))
10167 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10168 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10169 == FUNCTION_TYPE))))
10170 return 0;
10171 #endif
10173 STRIP_NOPS (arg0);
10174 STRIP_NOPS (arg1);
10176 /* Get the rtx comparison code to use. We know that EXP is a comparison
10177 operation of some type. Some comparisons against 1 and -1 can be
10178 converted to comparisons with zero. Do so here so that the tests
10179 below will be aware that we have a comparison with zero. These
10180 tests will not catch constants in the first operand, but constants
10181 are rarely passed as the first operand. */
10183 switch (TREE_CODE (exp))
10185 case EQ_EXPR:
10186 code = EQ;
10187 break;
10188 case NE_EXPR:
10189 code = NE;
10190 break;
10191 case LT_EXPR:
10192 if (integer_onep (arg1))
10193 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10194 else
10195 code = unsignedp ? LTU : LT;
10196 break;
10197 case LE_EXPR:
10198 if (! unsignedp && integer_all_onesp (arg1))
10199 arg1 = integer_zero_node, code = LT;
10200 else
10201 code = unsignedp ? LEU : LE;
10202 break;
10203 case GT_EXPR:
10204 if (! unsignedp && integer_all_onesp (arg1))
10205 arg1 = integer_zero_node, code = GE;
10206 else
10207 code = unsignedp ? GTU : GT;
10208 break;
10209 case GE_EXPR:
10210 if (integer_onep (arg1))
10211 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10212 else
10213 code = unsignedp ? GEU : GE;
10214 break;
10216 case UNORDERED_EXPR:
10217 code = UNORDERED;
10218 break;
10219 case ORDERED_EXPR:
10220 code = ORDERED;
10221 break;
10222 case UNLT_EXPR:
10223 code = UNLT;
10224 break;
10225 case UNLE_EXPR:
10226 code = UNLE;
10227 break;
10228 case UNGT_EXPR:
10229 code = UNGT;
10230 break;
10231 case UNGE_EXPR:
10232 code = UNGE;
10233 break;
10234 case UNEQ_EXPR:
10235 code = UNEQ;
10236 break;
10238 default:
10239 abort ();
10242 /* Put a constant second. */
10243 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10245 tem = arg0; arg0 = arg1; arg1 = tem;
10246 code = swap_condition (code);
10249 /* If this is an equality or inequality test of a single bit, we can
10250 do this by shifting the bit being tested to the low-order bit and
10251 masking the result with the constant 1. If the condition was EQ,
10252 we xor it with 1. This does not require an scc insn and is faster
10253 than an scc insn even if we have it. */
10255 if ((code == NE || code == EQ)
10256 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10257 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10259 tree inner = TREE_OPERAND (arg0, 0);
10260 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10261 int ops_unsignedp;
10263 /* If INNER is a right shift of a constant and it plus BITNUM does
10264 not overflow, adjust BITNUM and INNER. */
10266 if (TREE_CODE (inner) == RSHIFT_EXPR
10267 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10268 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10269 && bitnum < TYPE_PRECISION (type)
10270 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10271 bitnum - TYPE_PRECISION (type)))
10273 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10274 inner = TREE_OPERAND (inner, 0);
10277 /* If we are going to be able to omit the AND below, we must do our
10278 operations as unsigned. If we must use the AND, we have a choice.
10279 Normally unsigned is faster, but for some machines signed is. */
10280 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10281 #ifdef LOAD_EXTEND_OP
10282 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10283 #else
10284 : 1
10285 #endif
10286 );
10288 if (! get_subtarget (subtarget)
10289 || GET_MODE (subtarget) != operand_mode
10290 || ! safe_from_p (subtarget, inner, 1))
10291 subtarget = 0;
10293 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10295 if (bitnum != 0)
10296 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10297 size_int (bitnum), subtarget, ops_unsignedp);
10299 if (GET_MODE (op0) != mode)
10300 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10302 if ((code == EQ && ! invert) || (code == NE && invert))
10303 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10304 ops_unsignedp, OPTAB_LIB_WIDEN);
10306 /* Put the AND last so it can combine with more things. */
10307 if (bitnum != TYPE_PRECISION (type) - 1)
10308 op0 = expand_and (op0, const1_rtx, subtarget);
10310 return op0;
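/* Example of the single-bit path above: for (x & 8) != 0 the bit number is
   3, so X is shifted right by 3 and, unless bit 3 is the sign bit of its
   type, masked with 1; the result is already the 0/1 value wanted.  For
   (x & 8) == 0 the shifted value is additionally XORed with 1 before the
   masking.  No scc instruction is needed at all, which is why this case is
   handled before the can_compare_p check below.  */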
10313 /* Now see if we are likely to be able to do this. Return if not. */
10314 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10315 return 0;
10317 icode = setcc_gen_code[(int) code];
10318 if (icode == CODE_FOR_nothing
10319 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10321 /* We can only do this if it is one of the special cases that
10322 can be handled without an scc insn. */
10323 if ((code == LT && integer_zerop (arg1))
10324 || (! only_cheap && code == GE && integer_zerop (arg1)))
10325 ;
10326 else if (BRANCH_COST >= 0
10327 && ! only_cheap && (code == NE || code == EQ)
10328 && TREE_CODE (type) != REAL_TYPE
10329 && ((abs_optab->handlers[(int) operand_mode].insn_code
10330 != CODE_FOR_nothing)
10331 || (ffs_optab->handlers[(int) operand_mode].insn_code
10332 != CODE_FOR_nothing)))
10333 ;
10334 else
10335 return 0;
10338 if (! get_subtarget (target)
10339 || GET_MODE (subtarget) != operand_mode
10340 || ! safe_from_p (subtarget, arg1, 1))
10341 subtarget = 0;
10343 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10344 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10346 if (target == 0)
10347 target = gen_reg_rtx (mode);
10349 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10350 because, if emit_store_flag does anything, it will succeed and
10351 OP0 and OP1 will not be used subsequently. */
10353 result = emit_store_flag (target, code,
10354 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10355 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10356 operand_mode, unsignedp, 1);
10358 if (result)
10360 if (invert)
10361 result = expand_binop (mode, xor_optab, result, const1_rtx,
10362 result, 0, OPTAB_LIB_WIDEN);
10363 return result;
10366 /* If this failed, we have to do this with set/compare/jump/set code. */
10367 if (GET_CODE (target) != REG
10368 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10369 target = gen_reg_rtx (GET_MODE (target));
10371 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10372 result = compare_from_rtx (op0, op1, code, unsignedp,
10373 operand_mode, NULL_RTX, 0);
10374 if (GET_CODE (result) == CONST_INT)
10375 return (((result == const0_rtx && ! invert)
10376 || (result != const0_rtx && invert))
10377 ? const0_rtx : const1_rtx);
10379 label = gen_label_rtx ();
10380 if (bcc_gen_fctn[(int) code] == 0)
10381 abort ();
10383 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10384 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10385 emit_label (label);
10387 return target;
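/* When emit_store_flag cannot handle the comparison, the
   set/compare/jump/set sequence emitted just above is, roughly,

	target = 1;			(0 when INVERT)
	compare op0, op1
	branch-if-CODE	over
	target = 0;			(1 when INVERT)
     over:

   i.e. the "true" value is stored first and the store of the "false" value
   is jumped around when the condition holds.  */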
10390 /* Generate a tablejump instruction (used for switch statements). */
10392 #ifdef HAVE_tablejump
10394 /* INDEX is the value being switched on, with the lowest value
10395 in the table already subtracted.
10396 MODE is its expected mode (needed if INDEX is constant).
10397 RANGE is the length of the jump table.
10398 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10400 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10401 index value is out of range. */
10403 void
10404 do_tablejump (index, mode, range, table_label, default_label)
10405 rtx index, range, table_label, default_label;
10406 enum machine_mode mode;
10408 register rtx temp, vector;
10410 /* Do an unsigned comparison (in the proper mode) between the index
10411 expression and the value which represents the length of the range.
10412 Since we just finished subtracting the lower bound of the range
10413 from the index expression, this comparison allows us to simultaneously
10414 check that the original index expression value is both greater than
10415 or equal to the minimum value of the range and less than or equal to
10416 the maximum value of the range. */
10418 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10419 0, default_label);
10421 /* If index is in range, it must fit in Pmode.
10422 Convert to Pmode so we can index with it. */
10423 if (mode != Pmode)
10424 index = convert_to_mode (Pmode, index, 1);
10426 /* Don't let a MEM slip thru, because then INDEX that comes
10427 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10428 and break_out_memory_refs will go to work on it and mess it up. */
10429 #ifdef PIC_CASE_VECTOR_ADDRESS
10430 if (flag_pic && GET_CODE (index) != REG)
10431 index = copy_to_mode_reg (Pmode, index);
10432 #endif
10434 /* If flag_force_addr were to affect this address
10435 it could interfere with the tricky assumptions made
10436 about addresses that contain label-refs,
10437 which may be valid only very near the tablejump itself. */
10438 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10439 GET_MODE_SIZE, because this indicates how large insns are. The other
10440 uses should all be Pmode, because they are addresses. This code
10441 could fail if addresses and insns are not the same size. */
10442 index = gen_rtx_PLUS (Pmode,
10443 gen_rtx_MULT (Pmode, index,
10444 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10445 gen_rtx_LABEL_REF (Pmode, table_label));
10446 #ifdef PIC_CASE_VECTOR_ADDRESS
10447 if (flag_pic)
10448 index = PIC_CASE_VECTOR_ADDRESS (index);
10449 else
10450 #endif
10451 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10452 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10453 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10454 RTX_UNCHANGING_P (vector) = 1;
10455 convert_move (temp, vector, 0);
10457 emit_jump_insn (gen_tablejump (temp, table_label));
10459 /* If we are generating PIC code or if the table is PC-relative, the
10460 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10461 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10462 emit_barrier ();
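/* Putting the pieces together, the code emitted here behaves roughly like

     if ((unsigned) index > range) goto default_label;
     addr = index * GET_MODE_SIZE (CASE_VECTOR_MODE) + &&table_label;
     goto *(void **) addr;

   except that the table entry is loaded in CASE_VECTOR_MODE (it may be a
   PC-relative or PIC-relative offset rather than an absolute address),
   PIC_CASE_VECTOR_ADDRESS may rewrite ADDR first, and the trailing barrier
   is omitted whenever the dispatch table has to stay adjacent to the jump
   insn.  */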
10465 #endif /* HAVE_tablejump */