1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
55 #ifdef PUSH_ROUNDING
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first. */
59 #endif
61 #endif
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
74 #endif
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
82 int cse_not_expected;
84 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
85 static tree placeholder_list = 0;
87 /* This structure is used by move_by_pieces to describe the move to
88 be performed. */
89 struct move_by_pieces
91 rtx to;
92 rtx to_addr;
93 int autinc_to;
94 int explicit_inc_to;
95 rtx from;
96 rtx from_addr;
97 int autinc_from;
98 int explicit_inc_from;
99 unsigned HOST_WIDE_INT len;
100 HOST_WIDE_INT offset;
101 int reverse;
104 /* This structure is used by store_by_pieces to describe the clear to
105 be performed. */
107 struct store_by_pieces
109 rtx to;
110 rtx to_addr;
111 int autinc_to;
112 int explicit_inc_to;
113 unsigned HOST_WIDE_INT len;
114 HOST_WIDE_INT offset;
115 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
116 PTR constfundata;
117 int reverse;
120 extern struct obstack permanent_obstack;
122 static rtx enqueue_insn PARAMS ((rtx, rtx));
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
124 PARAMS ((unsigned HOST_WIDE_INT,
125 unsigned int));
126 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *));
128 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
129 enum machine_mode));
130 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
131 unsigned int));
132 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
133 unsigned int));
134 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
135 enum machine_mode,
136 struct store_by_pieces *));
137 static rtx compress_float_constant PARAMS ((rtx, rtx));
138 static rtx get_subtarget PARAMS ((rtx));
139 static int is_zeros_p PARAMS ((tree));
140 static int mostly_zeros_p PARAMS ((tree));
141 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, int));
144 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
145 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
146 HOST_WIDE_INT, enum machine_mode,
147 tree, enum machine_mode, int, tree,
148 int));
149 static rtx var_rtx PARAMS ((tree));
150 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
151 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
152 static int is_aligning_offset PARAMS ((tree, tree));
153 static rtx expand_increment PARAMS ((tree, int, int));
154 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
155 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
156 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
157 rtx, rtx));
158 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
159 #ifdef PUSH_ROUNDING
160 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
161 #endif
162 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
164 /* Record for each mode whether we can move a register directly to or
165 from an object of that mode in memory. If we can't, we won't try
166 to use that mode directly when accessing a field of that mode. */
168 static char direct_load[NUM_MACHINE_MODES];
169 static char direct_store[NUM_MACHINE_MODES];
171 /* Record for each mode whether we can float-extend from memory. */
173 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
175 /* If a memory-to-memory move would take MOVE_RATIO or more simple
176 move-instruction sequences, we will do a movstr or libcall instead. */
178 #ifndef MOVE_RATIO
179 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
180 #define MOVE_RATIO 2
181 #else
182 /* If we are optimizing for space (-Os), cut down the default move ratio. */
183 #define MOVE_RATIO (optimize_size ? 3 : 15)
184 #endif
185 #endif
187 /* This macro is used to determine whether move_by_pieces should be called
188 to perform a structure copy. */
189 #ifndef MOVE_BY_PIECES_P
190 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
191 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
192 #endif
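/* For example (hypothetical numbers): with the default MOVE_RATIO of 15
   on a 32-bit target without movstr patterns, a 16-byte word-aligned
   copy needs only 4 word moves, so MOVE_BY_PIECES_P is true and the
   copy is expanded inline; a 256-byte copy would instead fall back to
   a movstr pattern or a library call.  */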
194 /* This array records the insn_code of insns to perform block moves. */
195 enum insn_code movstr_optab[NUM_MACHINE_MODES];
197 /* This array records the insn_code of insns to perform block clears. */
198 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
200 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
202 #ifndef SLOW_UNALIGNED_ACCESS
203 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
204 #endif
206 /* This is run once per compilation to set up which modes can be used
207 directly in memory and to initialize the block move optab. */
209 void
210 init_expr_once ()
212 rtx insn, pat;
213 enum machine_mode mode;
214 int num_clobbers;
215 rtx mem, mem1;
217 /* Try indexing by frame ptr and try by stack ptr.
218 It is known that on the Convex the stack ptr isn't a valid index.
219 With luck, one or the other is valid on any machine. */
220 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
221 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
223 insn = rtx_alloc (INSN);
224 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
225 PATTERN (insn) = pat;
227 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
228 mode = (enum machine_mode) ((int) mode + 1))
230 int regno;
231 rtx reg;
233 direct_load[(int) mode] = direct_store[(int) mode] = 0;
234 PUT_MODE (mem, mode);
235 PUT_MODE (mem1, mode);
237 /* See if there is some register that can be used in this mode and
238 directly loaded or stored from memory. */
240 if (mode != VOIDmode && mode != BLKmode)
241 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
242 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
243 regno++)
245 if (! HARD_REGNO_MODE_OK (regno, mode))
246 continue;
248 reg = gen_rtx_REG (mode, regno);
250 SET_SRC (pat) = mem;
251 SET_DEST (pat) = reg;
252 if (recog (pat, insn, &num_clobbers) >= 0)
253 direct_load[(int) mode] = 1;
255 SET_SRC (pat) = mem1;
256 SET_DEST (pat) = reg;
257 if (recog (pat, insn, &num_clobbers) >= 0)
258 direct_load[(int) mode] = 1;
260 SET_SRC (pat) = reg;
261 SET_DEST (pat) = mem;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_store[(int) mode] = 1;
265 SET_SRC (pat) = reg;
266 SET_DEST (pat) = mem1;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_store[(int) mode] = 1;
272 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
274 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
275 mode = GET_MODE_WIDER_MODE (mode))
277 enum machine_mode srcmode;
278 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
279 srcmode = GET_MODE_WIDER_MODE (srcmode))
281 enum insn_code ic;
283 ic = can_extend_p (mode, srcmode, 0);
284 if (ic == CODE_FOR_nothing)
285 continue;
287 PUT_MODE (mem, srcmode);
289 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
290 float_extend_from_mem[mode][srcmode] = true;
295 /* This is run at the start of compiling a function. */
297 void
298 init_expr ()
300 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
302 pending_chain = 0;
303 pending_stack_adjust = 0;
304 stack_pointer_delta = 0;
305 inhibit_defer_pop = 0;
306 saveregs_value = 0;
307 apply_args_value = 0;
308 forced_labels = 0;
311 void
312 mark_expr_status (p)
313 struct expr_status *p;
315 if (p == NULL)
316 return;
318 ggc_mark_rtx (p->x_saveregs_value);
319 ggc_mark_rtx (p->x_apply_args_value);
320 ggc_mark_rtx (p->x_forced_labels);
323 void
324 free_expr_status (f)
325 struct function *f;
327 free (f->expr);
328 f->expr = NULL;
331 /* Small sanity check that the queue is empty at the end of a function. */
333 void
334 finish_expr_for_function ()
336 if (pending_chain)
337 abort ();
340 /* Manage the queue of increment instructions to be output
341 for POSTINCREMENT_EXPR expressions, etc. */
343 /* Queue up to increment (or change) VAR later. BODY says how:
344 BODY should be the same thing you would pass to emit_insn
345 to increment right away. It will go to emit_insn later on.
347 The value is a QUEUED expression to be used in place of VAR
348 where you want to guarantee the pre-incrementation value of VAR. */
350 static rtx
351 enqueue_insn (var, body)
352 rtx var, body;
354 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
355 body, pending_chain);
356 return pending_chain;
359 /* Use protect_from_queue to convert a QUEUED expression
360 into something that you can put immediately into an instruction.
361 If the queued incrementation has not happened yet,
362 protect_from_queue returns the variable itself.
363 If the incrementation has happened, protect_from_queue returns a temp
364 that contains a copy of the old value of the variable.
366 Any time an rtx which might possibly be a QUEUED is to be put
367 into an instruction, it must be passed through protect_from_queue first.
368 QUEUED expressions are not meaningful in instructions.
370 Do not pass a value through protect_from_queue and then hold
371 on to it for a while before putting it in an instruction!
372 If the queue is flushed in between, incorrect code will result. */
375 protect_from_queue (x, modify)
376 rtx x;
377 int modify;
379 RTX_CODE code = GET_CODE (x);
381 #if 0 /* A QUEUED can hang around after the queue is forced out. */
382 /* Shortcut for most common case. */
383 if (pending_chain == 0)
384 return x;
385 #endif
387 if (code != QUEUED)
389 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
390 use of autoincrement. Make a copy of the contents of the memory
391 location rather than a copy of the address, but not if the value is
392 of mode BLKmode. Don't modify X in place since it might be
393 shared. */
394 if (code == MEM && GET_MODE (x) != BLKmode
395 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
397 rtx y = XEXP (x, 0);
398 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
400 if (QUEUED_INSN (y))
402 rtx temp = gen_reg_rtx (GET_MODE (x));
404 emit_insn_before (gen_move_insn (temp, new),
405 QUEUED_INSN (y));
406 return temp;
409 /* Copy the address into a pseudo, so that the returned value
410 remains correct across calls to emit_queue. */
411 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
414 /* Otherwise, recursively protect the subexpressions of all
415 the kinds of rtx's that can contain a QUEUED. */
416 if (code == MEM)
418 rtx tem = protect_from_queue (XEXP (x, 0), 0);
419 if (tem != XEXP (x, 0))
421 x = copy_rtx (x);
422 XEXP (x, 0) = tem;
425 else if (code == PLUS || code == MULT)
427 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
428 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
429 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
431 x = copy_rtx (x);
432 XEXP (x, 0) = new0;
433 XEXP (x, 1) = new1;
436 return x;
438 /* If the increment has not happened, use the variable itself. Copy it
439 into a new pseudo so that the value remains correct across calls to
440 emit_queue. */
441 if (QUEUED_INSN (x) == 0)
442 return copy_to_reg (QUEUED_VAR (x));
443 /* If the increment has happened and a pre-increment copy exists,
444 use that copy. */
445 if (QUEUED_COPY (x) != 0)
446 return QUEUED_COPY (x);
447 /* The increment has happened but we haven't set up a pre-increment copy.
448 Set one up now, and use it. */
449 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
450 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
451 QUEUED_INSN (x));
452 return QUEUED_COPY (x);
455 /* Return nonzero if X contains a QUEUED expression:
456 if it contains anything that will be altered by a queued increment.
457 We handle only combinations of MEM, PLUS, MINUS and MULT operators
458 since memory addresses generally contain only those. */
461 queued_subexp_p (x)
462 rtx x;
464 enum rtx_code code = GET_CODE (x);
465 switch (code)
467 case QUEUED:
468 return 1;
469 case MEM:
470 return queued_subexp_p (XEXP (x, 0));
471 case MULT:
472 case PLUS:
473 case MINUS:
474 return (queued_subexp_p (XEXP (x, 0))
475 || queued_subexp_p (XEXP (x, 1)));
476 default:
477 return 0;
481 /* Perform all the pending incrementations. */
483 void
484 emit_queue ()
486 rtx p;
487 while ((p = pending_chain))
489 rtx body = QUEUED_BODY (p);
491 if (GET_CODE (body) == SEQUENCE)
493 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
494 emit_insn (QUEUED_BODY (p));
496 else
497 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
498 pending_chain = QUEUED_NEXT (p);
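/* A minimal usage sketch of the queue interface (hypothetical caller,
   not part of this file):

     rtx op = protect_from_queue (some_rtx, 0);
     emit_insn (gen_rtx_SET (VOIDmode, target, op));
     emit_queue ();

   i.e. every rtx that might be QUEUED is filtered through
   protect_from_queue immediately before being placed in an insn, and
   the pending post-increments are flushed with emit_queue once the
   containing expression has been expanded.  */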
502 /* Copy data from FROM to TO, where the machine modes are not the same.
503 Both modes may be integer, or both may be floating.
504 UNSIGNEDP should be nonzero if FROM is an unsigned type.
505 This causes zero-extension instead of sign-extension. */
507 void
508 convert_move (to, from, unsignedp)
509 rtx to, from;
510 int unsignedp;
512 enum machine_mode to_mode = GET_MODE (to);
513 enum machine_mode from_mode = GET_MODE (from);
514 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
515 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
516 enum insn_code code;
517 rtx libcall;
519 /* rtx code for making an equivalent value. */
520 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
522 to = protect_from_queue (to, 1);
523 from = protect_from_queue (from, 0);
525 if (to_real != from_real)
526 abort ();
528 /* If FROM is a SUBREG that indicates that we have already done at least
529 the required extension, strip it. We don't handle such SUBREGs as
530 TO here. */
532 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
533 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
534 >= GET_MODE_SIZE (to_mode))
535 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
536 from = gen_lowpart (to_mode, from), from_mode = to_mode;
538 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
539 abort ();
541 if (to_mode == from_mode
542 || (from_mode == VOIDmode && CONSTANT_P (from)))
544 emit_move_insn (to, from);
545 return;
548 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
550 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
551 abort ();
553 if (VECTOR_MODE_P (to_mode))
554 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
555 else
556 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
558 emit_move_insn (to, from);
559 return;
562 if (to_real != from_real)
563 abort ();
565 if (to_real)
567 rtx value, insns;
569 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
571 /* Try converting directly if the insn is supported. */
572 if ((code = can_extend_p (to_mode, from_mode, 0))
573 != CODE_FOR_nothing)
575 emit_unop_insn (code, to, from, UNKNOWN);
576 return;
580 #ifdef HAVE_trunchfqf2
581 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
583 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
584 return;
586 #endif
587 #ifdef HAVE_trunctqfqf2
588 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
590 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
591 return;
593 #endif
594 #ifdef HAVE_truncsfqf2
595 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
597 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
598 return;
600 #endif
601 #ifdef HAVE_truncdfqf2
602 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
604 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
605 return;
607 #endif
608 #ifdef HAVE_truncxfqf2
609 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
611 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
612 return;
614 #endif
615 #ifdef HAVE_trunctfqf2
616 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
618 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
619 return;
621 #endif
623 #ifdef HAVE_trunctqfhf2
624 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
626 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
627 return;
629 #endif
630 #ifdef HAVE_truncsfhf2
631 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
633 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
634 return;
636 #endif
637 #ifdef HAVE_truncdfhf2
638 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
640 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
641 return;
643 #endif
644 #ifdef HAVE_truncxfhf2
645 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
647 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
648 return;
650 #endif
651 #ifdef HAVE_trunctfhf2
652 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
654 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
655 return;
657 #endif
659 #ifdef HAVE_truncsftqf2
660 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
662 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
663 return;
665 #endif
666 #ifdef HAVE_truncdftqf2
667 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
669 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
670 return;
672 #endif
673 #ifdef HAVE_truncxftqf2
674 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
676 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
677 return;
679 #endif
680 #ifdef HAVE_trunctftqf2
681 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
683 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
684 return;
686 #endif
688 #ifdef HAVE_truncdfsf2
689 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
691 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
692 return;
694 #endif
695 #ifdef HAVE_truncxfsf2
696 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
698 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
699 return;
701 #endif
702 #ifdef HAVE_trunctfsf2
703 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
705 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
706 return;
708 #endif
709 #ifdef HAVE_truncxfdf2
710 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
712 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
713 return;
715 #endif
716 #ifdef HAVE_trunctfdf2
717 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
719 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
720 return;
722 #endif
724 libcall = (rtx) 0;
725 switch (from_mode)
727 case SFmode:
728 switch (to_mode)
730 case DFmode:
731 libcall = extendsfdf2_libfunc;
732 break;
734 case XFmode:
735 libcall = extendsfxf2_libfunc;
736 break;
738 case TFmode:
739 libcall = extendsftf2_libfunc;
740 break;
742 default:
743 break;
745 break;
747 case DFmode:
748 switch (to_mode)
750 case SFmode:
751 libcall = truncdfsf2_libfunc;
752 break;
754 case XFmode:
755 libcall = extenddfxf2_libfunc;
756 break;
758 case TFmode:
759 libcall = extenddftf2_libfunc;
760 break;
762 default:
763 break;
765 break;
767 case XFmode:
768 switch (to_mode)
770 case SFmode:
771 libcall = truncxfsf2_libfunc;
772 break;
774 case DFmode:
775 libcall = truncxfdf2_libfunc;
776 break;
778 default:
779 break;
781 break;
783 case TFmode:
784 switch (to_mode)
786 case SFmode:
787 libcall = trunctfsf2_libfunc;
788 break;
790 case DFmode:
791 libcall = trunctfdf2_libfunc;
792 break;
794 default:
795 break;
797 break;
799 default:
800 break;
803 if (libcall == (rtx) 0)
804 /* This conversion is not implemented yet. */
805 abort ();
807 start_sequence ();
808 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
809 1, from, from_mode);
810 insns = get_insns ();
811 end_sequence ();
812 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
813 from));
814 return;
817 /* Now both modes are integers. */
819 /* Handle expanding beyond a word. */
820 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
821 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
823 rtx insns;
824 rtx lowpart;
825 rtx fill_value;
826 rtx lowfrom;
827 int i;
828 enum machine_mode lowpart_mode;
829 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
831 /* Try converting directly if the insn is supported. */
832 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
833 != CODE_FOR_nothing)
835 /* If FROM is a SUBREG, put it into a register. Do this
836 so that we always generate the same set of insns for
837 better cse'ing; if an intermediate assignment occurred,
838 we won't be doing the operation directly on the SUBREG. */
839 if (optimize > 0 && GET_CODE (from) == SUBREG)
840 from = force_reg (from_mode, from);
841 emit_unop_insn (code, to, from, equiv_code);
842 return;
844 /* Next, try converting via full word. */
845 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
846 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
847 != CODE_FOR_nothing))
849 if (GET_CODE (to) == REG)
850 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
851 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
852 emit_unop_insn (code, to,
853 gen_lowpart (word_mode, to), equiv_code);
854 return;
857 /* No special multiword conversion insn; do it by hand. */
858 start_sequence ();
860 /* Since we will turn this into a no conflict block, we must ensure
861 that the source does not overlap the target. */
863 if (reg_overlap_mentioned_p (to, from))
864 from = force_reg (from_mode, from);
866 /* Get a copy of FROM widened to a word, if necessary. */
867 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
868 lowpart_mode = word_mode;
869 else
870 lowpart_mode = from_mode;
872 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
874 lowpart = gen_lowpart (lowpart_mode, to);
875 emit_move_insn (lowpart, lowfrom);
877 /* Compute the value to put in each remaining word. */
878 if (unsignedp)
879 fill_value = const0_rtx;
880 else
882 #ifdef HAVE_slt
883 if (HAVE_slt
884 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
885 && STORE_FLAG_VALUE == -1)
887 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
888 lowpart_mode, 0);
889 fill_value = gen_reg_rtx (word_mode);
890 emit_insn (gen_slt (fill_value));
892 else
893 #endif
895 fill_value
896 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
897 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
898 NULL_RTX, 0);
899 fill_value = convert_to_mode (word_mode, fill_value, 1);
903 /* Fill the remaining words. */
904 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
906 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
907 rtx subword = operand_subword (to, index, 1, to_mode);
909 if (subword == 0)
910 abort ();
912 if (fill_value != subword)
913 emit_move_insn (subword, fill_value);
916 insns = get_insns ();
917 end_sequence ();
919 emit_no_conflict_block (insns, to, from, NULL_RTX,
920 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
921 return;
924 /* Truncating multi-word to a word or less. */
925 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
926 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
928 if (!((GET_CODE (from) == MEM
929 && ! MEM_VOLATILE_P (from)
930 && direct_load[(int) to_mode]
931 && ! mode_dependent_address_p (XEXP (from, 0)))
932 || GET_CODE (from) == REG
933 || GET_CODE (from) == SUBREG))
934 from = force_reg (from_mode, from);
935 convert_move (to, gen_lowpart (word_mode, from), 0);
936 return;
939 /* Handle pointer conversion. */ /* SPEE 900220. */
940 if (to_mode == PQImode)
942 if (from_mode != QImode)
943 from = convert_to_mode (QImode, from, unsignedp);
945 #ifdef HAVE_truncqipqi2
946 if (HAVE_truncqipqi2)
948 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
949 return;
951 #endif /* HAVE_truncqipqi2 */
952 abort ();
955 if (from_mode == PQImode)
957 if (to_mode != QImode)
959 from = convert_to_mode (QImode, from, unsignedp);
960 from_mode = QImode;
962 else
964 #ifdef HAVE_extendpqiqi2
965 if (HAVE_extendpqiqi2)
967 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
968 return;
970 #endif /* HAVE_extendpqiqi2 */
971 abort ();
975 if (to_mode == PSImode)
977 if (from_mode != SImode)
978 from = convert_to_mode (SImode, from, unsignedp);
980 #ifdef HAVE_truncsipsi2
981 if (HAVE_truncsipsi2)
983 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
984 return;
986 #endif /* HAVE_truncsipsi2 */
987 abort ();
990 if (from_mode == PSImode)
992 if (to_mode != SImode)
994 from = convert_to_mode (SImode, from, unsignedp);
995 from_mode = SImode;
997 else
999 #ifdef HAVE_extendpsisi2
1000 if (! unsignedp && HAVE_extendpsisi2)
1002 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1003 return;
1005 #endif /* HAVE_extendpsisi2 */
1006 #ifdef HAVE_zero_extendpsisi2
1007 if (unsignedp && HAVE_zero_extendpsisi2)
1009 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1010 return;
1012 #endif /* HAVE_zero_extendpsisi2 */
1013 abort ();
1017 if (to_mode == PDImode)
1019 if (from_mode != DImode)
1020 from = convert_to_mode (DImode, from, unsignedp);
1022 #ifdef HAVE_truncdipdi2
1023 if (HAVE_truncdipdi2)
1025 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1026 return;
1028 #endif /* HAVE_truncdipdi2 */
1029 abort ();
1032 if (from_mode == PDImode)
1034 if (to_mode != DImode)
1036 from = convert_to_mode (DImode, from, unsignedp);
1037 from_mode = DImode;
1039 else
1041 #ifdef HAVE_extendpdidi2
1042 if (HAVE_extendpdidi2)
1044 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1045 return;
1047 #endif /* HAVE_extendpdidi2 */
1048 abort ();
1052 /* Now follow all the conversions between integers
1053 no more than a word long. */
1055 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1056 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1057 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1058 GET_MODE_BITSIZE (from_mode)))
1060 if (!((GET_CODE (from) == MEM
1061 && ! MEM_VOLATILE_P (from)
1062 && direct_load[(int) to_mode]
1063 && ! mode_dependent_address_p (XEXP (from, 0)))
1064 || GET_CODE (from) == REG
1065 || GET_CODE (from) == SUBREG))
1066 from = force_reg (from_mode, from);
1067 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1068 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1069 from = copy_to_reg (from);
1070 emit_move_insn (to, gen_lowpart (to_mode, from));
1071 return;
1074 /* Handle extension. */
1075 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1077 /* Convert directly if that works. */
1078 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1079 != CODE_FOR_nothing)
1081 if (flag_force_mem)
1082 from = force_not_mem (from);
1084 emit_unop_insn (code, to, from, equiv_code);
1085 return;
1087 else
1089 enum machine_mode intermediate;
1090 rtx tmp;
1091 tree shift_amount;
1093 /* Search for a mode to convert via. */
1094 for (intermediate = from_mode; intermediate != VOIDmode;
1095 intermediate = GET_MODE_WIDER_MODE (intermediate))
1096 if (((can_extend_p (to_mode, intermediate, unsignedp)
1097 != CODE_FOR_nothing)
1098 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1099 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1100 GET_MODE_BITSIZE (intermediate))))
1101 && (can_extend_p (intermediate, from_mode, unsignedp)
1102 != CODE_FOR_nothing))
1104 convert_move (to, convert_to_mode (intermediate, from,
1105 unsignedp), unsignedp);
1106 return;
1109 /* No suitable intermediate mode.
1110 Generate what we need with shifts. */
1111 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1112 - GET_MODE_BITSIZE (from_mode), 0);
1113 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1114 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1115 to, unsignedp);
1116 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1117 to, unsignedp);
1118 if (tmp != to)
1119 emit_move_insn (to, tmp);
1120 return;
1124 /* Support special truncate insns for certain modes. */
1126 if (from_mode == DImode && to_mode == SImode)
1128 #ifdef HAVE_truncdisi2
1129 if (HAVE_truncdisi2)
1131 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1132 return;
1134 #endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1139 if (from_mode == DImode && to_mode == HImode)
1141 #ifdef HAVE_truncdihi2
1142 if (HAVE_truncdihi2)
1144 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1145 return;
1147 #endif
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 return;
1152 if (from_mode == DImode && to_mode == QImode)
1154 #ifdef HAVE_truncdiqi2
1155 if (HAVE_truncdiqi2)
1157 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1158 return;
1160 #endif
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 return;
1165 if (from_mode == SImode && to_mode == HImode)
1167 #ifdef HAVE_truncsihi2
1168 if (HAVE_truncsihi2)
1170 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1171 return;
1173 #endif
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 return;
1178 if (from_mode == SImode && to_mode == QImode)
1180 #ifdef HAVE_truncsiqi2
1181 if (HAVE_truncsiqi2)
1183 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1184 return;
1186 #endif
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 return;
1191 if (from_mode == HImode && to_mode == QImode)
1193 #ifdef HAVE_trunchiqi2
1194 if (HAVE_trunchiqi2)
1196 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1197 return;
1199 #endif
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 return;
1204 if (from_mode == TImode && to_mode == DImode)
1206 #ifdef HAVE_trunctidi2
1207 if (HAVE_trunctidi2)
1209 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1210 return;
1212 #endif
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1214 return;
1217 if (from_mode == TImode && to_mode == SImode)
1219 #ifdef HAVE_trunctisi2
1220 if (HAVE_trunctisi2)
1222 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1223 return;
1225 #endif
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1227 return;
1230 if (from_mode == TImode && to_mode == HImode)
1232 #ifdef HAVE_trunctihi2
1233 if (HAVE_trunctihi2)
1235 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1236 return;
1238 #endif
1239 convert_move (to, force_reg (from_mode, from), unsignedp);
1240 return;
1243 if (from_mode == TImode && to_mode == QImode)
1245 #ifdef HAVE_trunctiqi2
1246 if (HAVE_trunctiqi2)
1248 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1249 return;
1251 #endif
1252 convert_move (to, force_reg (from_mode, from), unsignedp);
1253 return;
1256 /* Handle truncation of volatile memrefs, and so on;
1257 the things that couldn't be truncated directly,
1258 and for which there was no special instruction. */
1259 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1261 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1262 emit_move_insn (to, temp);
1263 return;
1266 /* Mode combination is not recognized. */
1267 abort ();
1270 /* Return an rtx for a value that would result
1271 from converting X to mode MODE.
1272 Both X and MODE may be floating, or both integer.
1273 UNSIGNEDP is nonzero if X is an unsigned value.
1274 This can be done by referring to a part of X in place
1275 or by copying to a new temporary with conversion.
1277 This function *must not* call protect_from_queue
1278 except when putting X into an insn (in which case convert_move does it). */
1281 convert_to_mode (mode, x, unsignedp)
1282 enum machine_mode mode;
1283 rtx x;
1284 int unsignedp;
1286 return convert_modes (mode, VOIDmode, x, unsignedp);
1289 /* Return an rtx for a value that would result
1290 from converting X from mode OLDMODE to mode MODE.
1291 Both modes may be floating, or both integer.
1292 UNSIGNEDP is nonzero if X is an unsigned value.
1294 This can be done by referring to a part of X in place
1295 or by copying to a new temporary with conversion.
1297 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1299 This function *must not* call protect_from_queue
1300 except when putting X into an insn (in which case convert_move does it). */
1303 convert_modes (mode, oldmode, x, unsignedp)
1304 enum machine_mode mode, oldmode;
1305 rtx x;
1306 int unsignedp;
1308 rtx temp;
1310 /* If FROM is a SUBREG that indicates that we have already done at least
1311 the required extension, strip it. */
1313 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1314 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1315 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1316 x = gen_lowpart (mode, x);
1318 if (GET_MODE (x) != VOIDmode)
1319 oldmode = GET_MODE (x);
1321 if (mode == oldmode)
1322 return x;
1324 /* There is one case that we must handle specially: If we are converting
1325 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1326 we are to interpret the constant as unsigned, gen_lowpart will do
1327 the wrong thing if the constant appears negative. What we want to do is
1328 make the high-order word of the constant zero, not all ones. */
1330 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1331 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1332 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1334 HOST_WIDE_INT val = INTVAL (x);
1336 if (oldmode != VOIDmode
1337 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1339 int width = GET_MODE_BITSIZE (oldmode);
1341 /* We need to zero extend VAL. */
1342 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1345 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
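/* A hypothetical illustration of the case above: with a 32-bit
   HOST_WIDE_INT, converting (const_int -1) interpreted as unsigned to
   a 64-bit integer mode must yield the double constant with low word
   0xffffffff and high word 0, whereas gen_lowpart would sign-extend
   and produce all ones.  */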
1348 /* We can do this with a gen_lowpart if both desired and current modes
1349 are integer, and this is either a constant integer, a register, or a
1350 non-volatile MEM. Except for the constant case where MODE is no
1351 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1353 if ((GET_CODE (x) == CONST_INT
1354 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1355 || (GET_MODE_CLASS (mode) == MODE_INT
1356 && GET_MODE_CLASS (oldmode) == MODE_INT
1357 && (GET_CODE (x) == CONST_DOUBLE
1358 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1359 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1360 && direct_load[(int) mode])
1361 || (GET_CODE (x) == REG
1362 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1363 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1365 /* ?? If we don't know OLDMODE, we have to assume here that
1366 X does not need sign- or zero-extension. This may not be
1367 the case, but it's the best we can do. */
1368 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1369 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1371 HOST_WIDE_INT val = INTVAL (x);
1372 int width = GET_MODE_BITSIZE (oldmode);
1374 /* We must sign or zero-extend in this case. Start by
1375 zero-extending, then sign extend if we need to. */
1376 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1377 if (! unsignedp
1378 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1379 val |= (HOST_WIDE_INT) (-1) << width;
1381 return gen_int_mode (val, mode);
1384 return gen_lowpart (mode, x);
1387 temp = gen_reg_rtx (mode);
1388 convert_move (temp, x, unsignedp);
1389 return temp;
1392 /* This macro is used to determine what the largest unit size that
1393 move_by_pieces can use is. */
1395 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1396 move efficiently, as opposed to MOVE_MAX which is the maximum
1397 number of bytes we can move with a single instruction. */
1399 #ifndef MOVE_MAX_PIECES
1400 #define MOVE_MAX_PIECES MOVE_MAX
1401 #endif
1403 /* Generate several move instructions to copy LEN bytes from block FROM to
1404 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1405 and TO through protect_from_queue before calling.
1407 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1408 used to push FROM to the stack.
1410 ALIGN is maximum alignment we can assume. */
1412 void
1413 move_by_pieces (to, from, len, align)
1414 rtx to, from;
1415 unsigned HOST_WIDE_INT len;
1416 unsigned int align;
1418 struct move_by_pieces data;
1419 rtx to_addr, from_addr = XEXP (from, 0);
1420 unsigned int max_size = MOVE_MAX_PIECES + 1;
1421 enum machine_mode mode = VOIDmode, tmode;
1422 enum insn_code icode;
1424 data.offset = 0;
1425 data.from_addr = from_addr;
1426 if (to)
1428 to_addr = XEXP (to, 0);
1429 data.to = to;
1430 data.autinc_to
1431 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1432 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1433 data.reverse
1434 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1436 else
1438 to_addr = NULL_RTX;
1439 data.to = NULL_RTX;
1440 data.autinc_to = 1;
1441 #ifdef STACK_GROWS_DOWNWARD
1442 data.reverse = 1;
1443 #else
1444 data.reverse = 0;
1445 #endif
1447 data.to_addr = to_addr;
1448 data.from = from;
1449 data.autinc_from
1450 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1451 || GET_CODE (from_addr) == POST_INC
1452 || GET_CODE (from_addr) == POST_DEC);
1454 data.explicit_inc_from = 0;
1455 data.explicit_inc_to = 0;
1456 if (data.reverse) data.offset = len;
1457 data.len = len;
1459 /* If copying requires more than two move insns,
1460 copy addresses to registers (to make displacements shorter)
1461 and use post-increment if available. */
1462 if (!(data.autinc_from && data.autinc_to)
1463 && move_by_pieces_ninsns (len, align) > 2)
1465 /* Find the mode of the largest move... */
1466 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1467 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1468 if (GET_MODE_SIZE (tmode) < max_size)
1469 mode = tmode;
1471 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1473 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1474 data.autinc_from = 1;
1475 data.explicit_inc_from = -1;
1477 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1479 data.from_addr = copy_addr_to_reg (from_addr);
1480 data.autinc_from = 1;
1481 data.explicit_inc_from = 1;
1483 if (!data.autinc_from && CONSTANT_P (from_addr))
1484 data.from_addr = copy_addr_to_reg (from_addr);
1485 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1487 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1488 data.autinc_to = 1;
1489 data.explicit_inc_to = -1;
1491 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1493 data.to_addr = copy_addr_to_reg (to_addr);
1494 data.autinc_to = 1;
1495 data.explicit_inc_to = 1;
1497 if (!data.autinc_to && CONSTANT_P (to_addr))
1498 data.to_addr = copy_addr_to_reg (to_addr);
1501 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1502 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1503 align = MOVE_MAX * BITS_PER_UNIT;
1505 /* First move what we can in the largest integer mode, then go to
1506 successively smaller modes. */
1508 while (max_size > 1)
1510 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1511 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1512 if (GET_MODE_SIZE (tmode) < max_size)
1513 mode = tmode;
1515 if (mode == VOIDmode)
1516 break;
1518 icode = mov_optab->handlers[(int) mode].insn_code;
1519 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1520 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1522 max_size = GET_MODE_SIZE (mode);
1525 /* The code above should have handled everything. */
1526 if (data.len > 0)
1527 abort ();
1530 /* Return number of insns required to move L bytes by pieces.
1531 ALIGN (in bits) is maximum alignment we can assume. */
1533 static unsigned HOST_WIDE_INT
1534 move_by_pieces_ninsns (l, align)
1535 unsigned HOST_WIDE_INT l;
1536 unsigned int align;
1538 unsigned HOST_WIDE_INT n_insns = 0;
1539 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1541 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1542 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1543 align = MOVE_MAX * BITS_PER_UNIT;
1545 while (max_size > 1)
1547 enum machine_mode mode = VOIDmode, tmode;
1548 enum insn_code icode;
1550 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1551 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1552 if (GET_MODE_SIZE (tmode) < max_size)
1553 mode = tmode;
1555 if (mode == VOIDmode)
1556 break;
1558 icode = mov_optab->handlers[(int) mode].insn_code;
1559 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1560 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1562 max_size = GET_MODE_SIZE (mode);
1565 if (l)
1566 abort ();
1567 return n_insns;
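/* A hypothetical check of the arithmetic above: on a 32-bit target
   with MOVE_MAX == 4 and word-aligned operands, L == 10 is counted as
   two SImode moves plus one HImode move, so the function returns 3.  */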
1570 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1571 with move instructions for mode MODE. GENFUN is the gen_... function
1572 to make a move insn for that mode. DATA has all the other info. */
1574 static void
1575 move_by_pieces_1 (genfun, mode, data)
1576 rtx (*genfun) PARAMS ((rtx, ...));
1577 enum machine_mode mode;
1578 struct move_by_pieces *data;
1580 unsigned int size = GET_MODE_SIZE (mode);
1581 rtx to1 = NULL_RTX, from1;
1583 while (data->len >= size)
1585 if (data->reverse)
1586 data->offset -= size;
1588 if (data->to)
1590 if (data->autinc_to)
1591 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1592 data->offset);
1593 else
1594 to1 = adjust_address (data->to, mode, data->offset);
1597 if (data->autinc_from)
1598 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1599 data->offset);
1600 else
1601 from1 = adjust_address (data->from, mode, data->offset);
1603 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1604 emit_insn (gen_add2_insn (data->to_addr,
1605 GEN_INT (-(HOST_WIDE_INT)size)));
1606 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1607 emit_insn (gen_add2_insn (data->from_addr,
1608 GEN_INT (-(HOST_WIDE_INT)size)));
1610 if (data->to)
1611 emit_insn ((*genfun) (to1, from1));
1612 else
1614 #ifdef PUSH_ROUNDING
1615 emit_single_push_insn (mode, from1, NULL);
1616 #else
1617 abort ();
1618 #endif
1621 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1622 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1623 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1624 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1626 if (! data->reverse)
1627 data->offset += size;
1629 data->len -= size;
1633 /* Emit code to move a block Y to a block X.
1634 This may be done with string-move instructions,
1635 with multiple scalar move instructions, or with a library call.
1637 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1638 with mode BLKmode.
1639 SIZE is an rtx that says how long they are.
1640 ALIGN is the maximum alignment we can assume they have.
1642 Return the address of the new block, if memcpy is called and returns it,
1643 0 otherwise. */
1646 emit_block_move (x, y, size)
1647 rtx x, y;
1648 rtx size;
1650 rtx retval = 0;
1651 #ifdef TARGET_MEM_FUNCTIONS
1652 static tree fn;
1653 tree call_expr, arg_list;
1654 #endif
1655 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1657 if (GET_MODE (x) != BLKmode)
1658 abort ();
1660 if (GET_MODE (y) != BLKmode)
1661 abort ();
1663 x = protect_from_queue (x, 1);
1664 y = protect_from_queue (y, 0);
1665 size = protect_from_queue (size, 0);
1667 if (GET_CODE (x) != MEM)
1668 abort ();
1669 if (GET_CODE (y) != MEM)
1670 abort ();
1671 if (size == 0)
1672 abort ();
1674 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1675 move_by_pieces (x, y, INTVAL (size), align);
1676 else
1678 /* Try the most limited insn first, because there's no point
1679 including more than one in the machine description unless
1680 the more limited one has some advantage. */
1682 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1683 enum machine_mode mode;
1685 /* Since this is a move insn, we don't care about volatility. */
1686 volatile_ok = 1;
1688 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1689 mode = GET_MODE_WIDER_MODE (mode))
1691 enum insn_code code = movstr_optab[(int) mode];
1692 insn_operand_predicate_fn pred;
1694 if (code != CODE_FOR_nothing
1695 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1696 here because if SIZE is less than the mode mask, as it is
1697 returned by the macro, it will definitely be less than the
1698 actual mode mask. */
1699 && ((GET_CODE (size) == CONST_INT
1700 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1701 <= (GET_MODE_MASK (mode) >> 1)))
1702 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1703 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1704 || (*pred) (x, BLKmode))
1705 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1706 || (*pred) (y, BLKmode))
1707 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1708 || (*pred) (opalign, VOIDmode)))
1710 rtx op2;
1711 rtx last = get_last_insn ();
1712 rtx pat;
1714 op2 = convert_to_mode (mode, size, 1);
1715 pred = insn_data[(int) code].operand[2].predicate;
1716 if (pred != 0 && ! (*pred) (op2, mode))
1717 op2 = copy_to_mode_reg (mode, op2);
1719 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1720 if (pat)
1722 emit_insn (pat);
1723 volatile_ok = 0;
1724 return 0;
1726 else
1727 delete_insns_since (last);
1731 volatile_ok = 0;
1733 /* X, Y, or SIZE may have been passed through protect_from_queue.
1735 It is unsafe to save the value generated by protect_from_queue
1736 and reuse it later. Consider what happens if emit_queue is
1737 called before the return value from protect_from_queue is used.
1739 Expansion of the CALL_EXPR below will call emit_queue before
1740 we are finished emitting RTL for argument setup. So if we are
1741 not careful we could get the wrong value for an argument.
1743 To avoid this problem we go ahead and emit code to copy X, Y &
1744 SIZE into new pseudos. We can then place those new pseudos
1745 into an RTL_EXPR and use them later, even after a call to
1746 emit_queue.
1748 Note this is not strictly needed for library calls since they
1749 do not call emit_queue before loading their arguments. However,
1750 we may need to have library calls call emit_queue in the future
1751 since failing to do so could cause problems for targets which
1752 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1753 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1754 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1756 #ifdef TARGET_MEM_FUNCTIONS
1757 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1758 #else
1759 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1760 TREE_UNSIGNED (integer_type_node));
1761 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1762 #endif
1764 #ifdef TARGET_MEM_FUNCTIONS
1765 /* It is incorrect to use the libcall calling conventions to call
1766 memcpy in this context.
1768 This could be a user call to memcpy and the user may wish to
1769 examine the return value from memcpy.
1771 For targets where libcalls and normal calls have different conventions
1772 for returning pointers, we could end up generating incorrect code.
1774 So instead of using a libcall sequence we build up a suitable
1775 CALL_EXPR and expand the call in the normal fashion. */
1776 if (fn == NULL_TREE)
1778 tree fntype;
1780 /* This was copied from except.c, I don't know if all this is
1781 necessary in this context or not. */
1782 fn = get_identifier ("memcpy");
1783 fntype = build_pointer_type (void_type_node);
1784 fntype = build_function_type (fntype, NULL_TREE);
1785 fn = build_decl (FUNCTION_DECL, fn, fntype);
1786 ggc_add_tree_root (&fn, 1);
1787 DECL_EXTERNAL (fn) = 1;
1788 TREE_PUBLIC (fn) = 1;
1789 DECL_ARTIFICIAL (fn) = 1;
1790 TREE_NOTHROW (fn) = 1;
1791 make_decl_rtl (fn, NULL);
1792 assemble_external (fn);
1795 /* We need to make an argument list for the function call.
1797 memcpy has three arguments, the first two are void * addresses and
1798 the last is a size_t byte count for the copy. */
1799 arg_list
1800 = build_tree_list (NULL_TREE,
1801 make_tree (build_pointer_type (void_type_node), x));
1802 TREE_CHAIN (arg_list)
1803 = build_tree_list (NULL_TREE,
1804 make_tree (build_pointer_type (void_type_node), y));
1805 TREE_CHAIN (TREE_CHAIN (arg_list))
1806 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1807 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1809 /* Now we have to build up the CALL_EXPR itself. */
1810 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1811 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1812 call_expr, arg_list, NULL_TREE);
1813 TREE_SIDE_EFFECTS (call_expr) = 1;
1815 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1816 #else
1817 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1818 VOIDmode, 3, y, Pmode, x, Pmode,
1819 convert_to_mode (TYPE_MODE (integer_type_node), size,
1820 TREE_UNSIGNED (integer_type_node)),
1821 TYPE_MODE (integer_type_node));
1822 #endif
1824 /* If we are initializing a readonly value, show the above call
1825 clobbered it. Otherwise, a load from it may erroneously be hoisted
1826 from a loop. */
1827 if (RTX_UNCHANGING_P (x))
1828 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1831 return retval;
1834 /* Copy all or part of a value X into registers starting at REGNO.
1835 The number of registers to be filled is NREGS. */
1837 void
1838 move_block_to_reg (regno, x, nregs, mode)
1839 int regno;
1840 rtx x;
1841 int nregs;
1842 enum machine_mode mode;
1844 int i;
1845 #ifdef HAVE_load_multiple
1846 rtx pat;
1847 rtx last;
1848 #endif
1850 if (nregs == 0)
1851 return;
1853 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1854 x = validize_mem (force_const_mem (mode, x));
1856 /* See if the machine can do this with a load multiple insn. */
1857 #ifdef HAVE_load_multiple
1858 if (HAVE_load_multiple)
1860 last = get_last_insn ();
1861 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1862 GEN_INT (nregs));
1863 if (pat)
1865 emit_insn (pat);
1866 return;
1868 else
1869 delete_insns_since (last);
1871 #endif
1873 for (i = 0; i < nregs; i++)
1874 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1875 operand_subword_force (x, i, mode));
1878 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1879 The number of registers to be filled is NREGS. SIZE indicates the number
1880 of bytes in the object X. */
1882 void
1883 move_block_from_reg (regno, x, nregs, size)
1884 int regno;
1885 rtx x;
1886 int nregs;
1887 int size;
1889 int i;
1890 #ifdef HAVE_store_multiple
1891 rtx pat;
1892 rtx last;
1893 #endif
1894 enum machine_mode mode;
1896 if (nregs == 0)
1897 return;
1899 /* If SIZE is that of a mode no bigger than a word, just use that
1900 mode's store operation. */
1901 if (size <= UNITS_PER_WORD
1902 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1903 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1905 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1906 return;
1909 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1910 to the left before storing to memory. Note that the previous test
1911 doesn't handle all cases (e.g. SIZE == 3). */
1912 if (size < UNITS_PER_WORD
1913 && BYTES_BIG_ENDIAN
1914 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1916 rtx tem = operand_subword (x, 0, 1, BLKmode);
1917 rtx shift;
1919 if (tem == 0)
1920 abort ();
1922 shift = expand_shift (LSHIFT_EXPR, word_mode,
1923 gen_rtx_REG (word_mode, regno),
1924 build_int_2 ((UNITS_PER_WORD - size)
1925 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1926 emit_move_insn (tem, shift);
1927 return;
1930 /* See if the machine can do this with a store multiple insn. */
1931 #ifdef HAVE_store_multiple
1932 if (HAVE_store_multiple)
1934 last = get_last_insn ();
1935 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1936 GEN_INT (nregs));
1937 if (pat)
1939 emit_insn (pat);
1940 return;
1942 else
1943 delete_insns_since (last);
1945 #endif
1947 for (i = 0; i < nregs; i++)
1949 rtx tem = operand_subword (x, i, 1, BLKmode);
1951 if (tem == 0)
1952 abort ();
1954 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1958 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1959 registers represented by a PARALLEL. SSIZE represents the total size of
1960 block SRC in bytes, or -1 if not known. */
1961 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1962 the balance will be in what would be the low-order memory addresses, i.e.
1963 left justified for big endian, right justified for little endian. This
1964 happens to be true for the targets currently using this support. If this
1965 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1966 would be needed. */
1968 void
1969 emit_group_load (dst, orig_src, ssize)
1970 rtx dst, orig_src;
1971 int ssize;
1973 rtx *tmps, src;
1974 int start, i;
1976 if (GET_CODE (dst) != PARALLEL)
1977 abort ();
1979 /* Check for a NULL entry, used to indicate that the parameter goes
1980 both on the stack and in registers. */
1981 if (XEXP (XVECEXP (dst, 0, 0), 0))
1982 start = 0;
1983 else
1984 start = 1;
1986 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1988 /* Process the pieces. */
1989 for (i = start; i < XVECLEN (dst, 0); i++)
1991 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1992 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1993 unsigned int bytelen = GET_MODE_SIZE (mode);
1994 int shift = 0;
1996 /* Handle trailing fragments that run over the size of the struct. */
1997 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1999 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2000 bytelen = ssize - bytepos;
2001 if (bytelen <= 0)
2002 abort ();
2005 /* If we won't be loading directly from memory, protect the real source
2006 from strange tricks we might play; but make sure that the source can
2007 be loaded directly into the destination. */
2008 src = orig_src;
2009 if (GET_CODE (orig_src) != MEM
2010 && (!CONSTANT_P (orig_src)
2011 || (GET_MODE (orig_src) != mode
2012 && GET_MODE (orig_src) != VOIDmode)))
2014 if (GET_MODE (orig_src) == VOIDmode)
2015 src = gen_reg_rtx (mode);
2016 else
2017 src = gen_reg_rtx (GET_MODE (orig_src));
2019 emit_move_insn (src, orig_src);
2022 /* Optimize the access just a bit. */
2023 if (GET_CODE (src) == MEM
2024 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2025 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2026 && bytelen == GET_MODE_SIZE (mode))
2028 tmps[i] = gen_reg_rtx (mode);
2029 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2031 else if (GET_CODE (src) == CONCAT)
2033 if ((bytepos == 0
2034 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2035 || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2036 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
2038 tmps[i] = XEXP (src, bytepos != 0);
2039 if (! CONSTANT_P (tmps[i])
2040 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2041 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2042 0, 1, NULL_RTX, mode, mode, ssize);
2044 else if (bytepos == 0)
2046 rtx mem = assign_stack_temp (GET_MODE (src),
2047 GET_MODE_SIZE (GET_MODE (src)), 0);
2048 emit_move_insn (mem, src);
2049 tmps[i] = adjust_address (mem, mode, 0);
2051 else
2052 abort ();
2054 else if (CONSTANT_P (src)
2055 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2056 tmps[i] = src;
2057 else
2058 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2059 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2060 mode, mode, ssize);
2062 if (BYTES_BIG_ENDIAN && shift)
2063 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2064 tmps[i], 0, OPTAB_WIDEN);
2067 emit_queue ();
2069 /* Copy the extracted pieces into the proper (probable) hard regs. */
2070 for (i = start; i < XVECLEN (dst, 0); i++)
2071 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
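/* Illustrative sketch (added commentary, not part of the original file):
   how a caller might build the PARALLEL that emit_group_load expects.
   The register numbers, modes and the 8-byte source are made-up values
   chosen only to show the (register, byte-offset) pairing; real callers
   get this layout from FUNCTION_ARG and friends.  */
#if 0
{
  /* A value split across two SImode registers at byte offsets 0 and 4.
     Each element of the PARALLEL is an EXPR_LIST of (target reg, offset).  */
  rtx dst = gen_rtx_PARALLEL
    (VOIDmode,
     gen_rtvec (2,
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 3),
				   GEN_INT (0)),
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 4),
				   GEN_INT (4))));

  /* SRC_MEM is assumed to be a BLKmode MEM holding the 8-byte block.  */
  emit_group_load (dst, src_mem, 8);
}
#endif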
2074 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2075 registers represented by a PARALLEL. SSIZE represents the total size of
2076 block DST, or -1 if not known. */
2078 void
2079 emit_group_store (orig_dst, src, ssize)
2080 rtx orig_dst, src;
2081 int ssize;
2083 rtx *tmps, dst;
2084 int start, i;
2086 if (GET_CODE (src) != PARALLEL)
2087 abort ();
2089 /* Check for a NULL entry, used to indicate that the parameter goes
2090 both on the stack and in registers. */
2091 if (XEXP (XVECEXP (src, 0, 0), 0))
2092 start = 0;
2093 else
2094 start = 1;
2096 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2098 /* Copy the (probable) hard regs into pseudos. */
2099 for (i = start; i < XVECLEN (src, 0); i++)
2101 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2102 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2103 emit_move_insn (tmps[i], reg);
2105 emit_queue ();
2107 /* If we won't be storing directly into memory, protect the real destination
2108 from strange tricks we might play. */
2109 dst = orig_dst;
2110 if (GET_CODE (dst) == PARALLEL)
2112 rtx temp;
2114 /* We can get a PARALLEL dst if there is a conditional expression in
2115 a return statement. In that case, the dst and src are the same,
2116 so no action is necessary. */
2117 if (rtx_equal_p (dst, src))
2118 return;
2120 /* It is unclear if we can ever reach here, but we may as well handle
2121 it. Allocate a temporary, and split this into a store/load to/from
2122 the temporary. */
2124 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2125 emit_group_store (temp, src, ssize);
2126 emit_group_load (dst, temp, ssize);
2127 return;
2129 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2131 dst = gen_reg_rtx (GET_MODE (orig_dst));
2132 /* Make life a bit easier for combine. */
2133 emit_move_insn (dst, const0_rtx);
2136 /* Process the pieces. */
2137 for (i = start; i < XVECLEN (src, 0); i++)
2139 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2140 enum machine_mode mode = GET_MODE (tmps[i]);
2141 unsigned int bytelen = GET_MODE_SIZE (mode);
2142 rtx dest = dst;
2144 /* Handle trailing fragments that run over the size of the struct. */
2145 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2147 if (BYTES_BIG_ENDIAN)
2149 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2150 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2151 tmps[i], 0, OPTAB_WIDEN);
2153 bytelen = ssize - bytepos;
2156 if (GET_CODE (dst) == CONCAT)
2158 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2159 dest = XEXP (dst, 0);
2160 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2162 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2163 dest = XEXP (dst, 1);
2165 else
2166 abort ();
2169 /* Optimize the access just a bit. */
2170 if (GET_CODE (dest) == MEM
2171 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2172 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2173 && bytelen == GET_MODE_SIZE (mode))
2174 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2175 else
2176 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2177 mode, tmps[i], ssize);
2180 emit_queue ();
2182 /* Copy from the pseudo into the (probable) hard reg. */
2183 if (GET_CODE (dst) == REG)
2184 emit_move_insn (orig_dst, dst);
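/* Added commentary (not part of the original file): emit_group_store is
   the inverse operation -- SRC is the PARALLEL of (register, byte-offset)
   pairs and ORIG_DST is the block being assembled, so with a two-register
   PARALLEL describing an 8-byte value a caller would simply write

	emit_group_store (dst_mem, parallel, 8);

   where DST_MEM is assumed to be a BLKmode MEM (or a CONCAT or pseudo, as
   handled above).  */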
2187 /* Generate code to copy a BLKmode object of TYPE out of a
2188 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2189 is null, a stack temporary is created. TGTBLK is returned.
2191 The primary purpose of this routine is to handle functions
2192 that return BLKmode structures in registers. Some machines
2193 (the PA for example) want to return all small structures
2194 in registers regardless of the structure's alignment. */
rtx
2197 copy_blkmode_from_reg (tgtblk, srcreg, type)
2198 rtx tgtblk;
2199 rtx srcreg;
2200 tree type;
2202 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2203 rtx src = NULL, dst = NULL;
2204 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2205 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2207 if (tgtblk == 0)
2209 tgtblk = assign_temp (build_qualified_type (type,
2210 (TYPE_QUALS (type)
2211 | TYPE_QUAL_CONST)),
2212 0, 1, 1);
2213 preserve_temp_slots (tgtblk);
2216 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2217 into a new pseudo which is a full word.
2219 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2220 the wrong part of the register gets copied so we fake a type conversion
2221 in place. */
2222 if (GET_MODE (srcreg) != BLKmode
2223 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2225 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2226 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2227 else
2228 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2231 /* Structures whose size is not a multiple of a word are aligned
2232 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2233 machine, this means we must skip the empty high order bytes when
2234 calculating the bit offset. */
2235 if (BYTES_BIG_ENDIAN
2236 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2237 && bytes % UNITS_PER_WORD)
2238 big_endian_correction
2239 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2241 /* Copy the structure BITSIZE bits at a time.
2243 We could probably emit more efficient code for machines which do not use
2244 strict alignment, but it doesn't seem worth the effort at the current
2245 time. */
2246 for (bitpos = 0, xbitpos = big_endian_correction;
2247 bitpos < bytes * BITS_PER_UNIT;
2248 bitpos += bitsize, xbitpos += bitsize)
2250 /* We need a new source operand each time xbitpos is on a
2251 word boundary and when xbitpos == big_endian_correction
2252 (the first time through). */
2253 if (xbitpos % BITS_PER_WORD == 0
2254 || xbitpos == big_endian_correction)
2255 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2256 GET_MODE (srcreg));
2258 /* We need a new destination operand each time bitpos is on
2259 a word boundary. */
2260 if (bitpos % BITS_PER_WORD == 0)
2261 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2263 /* Use xbitpos for the source extraction (right justified) and
2264 bitpos for the destination store (left justified). */
2265 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2266 extract_bit_field (src, bitsize,
2267 xbitpos % BITS_PER_WORD, 1,
2268 NULL_RTX, word_mode, word_mode,
2269 BITS_PER_WORD),
2270 BITS_PER_WORD);
2273 return tgtblk;
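/* Worked example (added commentary, not in the original source): on a
   32-bit BYTES_BIG_ENDIAN target with !FUNCTION_ARG_REG_LITTLE_ENDIAN,
   copying a 6-byte structure out of a register pair gives

	bytes = 6, bytes % UNITS_PER_WORD = 2,
	big_endian_correction = 32 - 2 * 8 = 16 bits,

   so the first extract_bit_field starts 16 bits into the first source
   word (skipping the unused high-order bytes) while the store into
   TGTBLK starts at bit 0.  */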
2276 /* Add a USE expression for REG to the (possibly empty) list pointed
2277 to by CALL_FUSAGE. REG must denote a hard register. */
2279 void
2280 use_reg (call_fusage, reg)
2281 rtx *call_fusage, reg;
2283 if (GET_CODE (reg) != REG
2284 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2285 abort ();
2287 *call_fusage
2288 = gen_rtx_EXPR_LIST (VOIDmode,
2289 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2292 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2293 starting at REGNO. All of these registers must be hard registers. */
2295 void
2296 use_regs (call_fusage, regno, nregs)
2297 rtx *call_fusage;
2298 int regno;
2299 int nregs;
2301 int i;
2303 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2304 abort ();
2306 for (i = 0; i < nregs; i++)
2307 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2310 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2311 PARALLEL REGS. This is for calls that pass values in multiple
2312 non-contiguous locations. The Irix 6 ABI has examples of this. */
2314 void
2315 use_group_regs (call_fusage, regs)
2316 rtx *call_fusage;
2317 rtx regs;
2319 int i;
2321 for (i = 0; i < XVECLEN (regs, 0); i++)
2323 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2325 /* A NULL entry means the parameter goes both on the stack and in
2326 registers. This can also be a MEM for targets that pass values
2327 partially on the stack and partially in registers. */
2328 if (reg != 0 && GET_CODE (reg) == REG)
2329 use_reg (call_fusage, reg);
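/* Illustrative sketch (added commentary, not part of the original file):
   building CALL_FUSAGE by hand.  The register numbers are arbitrary;
   real callers use whatever FUNCTION_ARG handed back.  */
#if 0
{
  rtx call_fusage = NULL_RTX;

  /* One argument passed in hard register 3 ...  */
  use_reg (&call_fusage, gen_rtx_REG (SImode, 3));
  /* ... and another passed in the consecutive pair 4/5.  */
  use_regs (&call_fusage, 4, 2);

  /* CALL_FUSAGE is now an EXPR_LIST of USE expressions, ready to be
     attached to the CALL_INSN.  */
}
#endif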
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call;
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */
int
2335 can_store_by_pieces (len, constfun, constfundata, align)
2336 unsigned HOST_WIDE_INT len;
2337 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2338 PTR constfundata;
2339 unsigned int align;
2341 unsigned HOST_WIDE_INT max_size, l;
2342 HOST_WIDE_INT offset = 0;
2343 enum machine_mode mode, tmode;
2344 enum insn_code icode;
2345 int reverse;
2346 rtx cst;
2348 if (! MOVE_BY_PIECES_P (len, align))
2349 return 0;
2351 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2352 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2353 align = MOVE_MAX * BITS_PER_UNIT;
2355 /* We would first store what we can in the largest integer mode, then go to
2356 successively smaller modes. */
2358 for (reverse = 0;
2359 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2360 reverse++)
2362 l = len;
2363 mode = VOIDmode;
2364 max_size = MOVE_MAX_PIECES + 1;
2365 while (max_size > 1)
2367 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2368 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2369 if (GET_MODE_SIZE (tmode) < max_size)
2370 mode = tmode;
2372 if (mode == VOIDmode)
2373 break;
2375 icode = mov_optab->handlers[(int) mode].insn_code;
2376 if (icode != CODE_FOR_nothing
2377 && align >= GET_MODE_ALIGNMENT (mode))
2379 unsigned int size = GET_MODE_SIZE (mode);
2381 while (l >= size)
2383 if (reverse)
2384 offset -= size;
2386 cst = (*constfun) (constfundata, offset, mode);
2387 if (!LEGITIMATE_CONSTANT_P (cst))
2388 return 0;
2390 if (!reverse)
2391 offset += size;
2393 l -= size;
2397 max_size = GET_MODE_SIZE (mode);
2400 /* The code above should have handled everything. */
2401 if (l != 0)
2402 abort ();
2405 return 1;
2408 /* Generate several move instructions to store LEN bytes generated by
2409 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2410 pointer which will be passed as argument in every CONSTFUN call.
2411 ALIGN is maximum alignment we can assume. */
2413 void
2414 store_by_pieces (to, len, constfun, constfundata, align)
2415 rtx to;
2416 unsigned HOST_WIDE_INT len;
2417 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2418 PTR constfundata;
2419 unsigned int align;
2421 struct store_by_pieces data;
2423 if (! MOVE_BY_PIECES_P (len, align))
2424 abort ();
2425 to = protect_from_queue (to, 1);
2426 data.constfun = constfun;
2427 data.constfundata = constfundata;
2428 data.len = len;
2429 data.to = to;
2430 store_by_pieces_1 (&data, align);
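/* Illustrative sketch (added commentary, not part of the original file):
   a minimal CONSTFUN callback and its use.  It fills a block with 0xff
   bytes, mirroring the way clear_by_pieces_1 below supplies const0_rtx;
   for integer modes no wider than HOST_WIDE_INT, constm1_rtx is an
   all-ones constant.  BLK_MEM, LEN and ALIGN are assumed to be supplied
   by the caller.  */
#if 0
static rtx
all_ones_constfun (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return constm1_rtx;
}

  /* ... and at the point of use:  */
  if (can_store_by_pieces (len, all_ones_constfun, NULL, align))
    store_by_pieces (blk_mem, len, all_ones_constfun, NULL, align);
#endif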
2433 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2434 rtx with BLKmode). The caller must pass TO through protect_from_queue
2435 before calling. ALIGN is maximum alignment we can assume. */
2437 static void
2438 clear_by_pieces (to, len, align)
2439 rtx to;
2440 unsigned HOST_WIDE_INT len;
2441 unsigned int align;
2443 struct store_by_pieces data;
2445 data.constfun = clear_by_pieces_1;
2446 data.constfundata = NULL;
2447 data.len = len;
2448 data.to = to;
2449 store_by_pieces_1 (&data, align);
2452 /* Callback routine for clear_by_pieces.
2453 Return const0_rtx unconditionally. */
2455 static rtx
2456 clear_by_pieces_1 (data, offset, mode)
2457 PTR data ATTRIBUTE_UNUSED;
2458 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2459 enum machine_mode mode ATTRIBUTE_UNUSED;
2461 return const0_rtx;
2464 /* Subroutine of clear_by_pieces and store_by_pieces.
2465 Generate several move instructions to store LEN bytes of block TO. (A MEM
2466 rtx with BLKmode). The caller must pass TO through protect_from_queue
2467 before calling. ALIGN is maximum alignment we can assume. */
2469 static void
2470 store_by_pieces_1 (data, align)
2471 struct store_by_pieces *data;
2472 unsigned int align;
2474 rtx to_addr = XEXP (data->to, 0);
2475 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2476 enum machine_mode mode = VOIDmode, tmode;
2477 enum insn_code icode;
2479 data->offset = 0;
2480 data->to_addr = to_addr;
2481 data->autinc_to
2482 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2483 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2485 data->explicit_inc_to = 0;
2486 data->reverse
2487 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2488 if (data->reverse)
2489 data->offset = data->len;
2491 /* If storing requires more than two move insns,
2492 copy addresses to registers (to make displacements shorter)
2493 and use post-increment if available. */
2494 if (!data->autinc_to
2495 && move_by_pieces_ninsns (data->len, align) > 2)
2497 /* Determine the main mode we'll be using. */
2498 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2499 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2500 if (GET_MODE_SIZE (tmode) < max_size)
2501 mode = tmode;
2503 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2505 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2506 data->autinc_to = 1;
2507 data->explicit_inc_to = -1;
2510 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2511 && ! data->autinc_to)
2513 data->to_addr = copy_addr_to_reg (to_addr);
2514 data->autinc_to = 1;
2515 data->explicit_inc_to = 1;
2518 if ( !data->autinc_to && CONSTANT_P (to_addr))
2519 data->to_addr = copy_addr_to_reg (to_addr);
2522 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2523 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2524 align = MOVE_MAX * BITS_PER_UNIT;
2526 /* First store what we can in the largest integer mode, then go to
2527 successively smaller modes. */
2529 while (max_size > 1)
2531 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2532 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2533 if (GET_MODE_SIZE (tmode) < max_size)
2534 mode = tmode;
2536 if (mode == VOIDmode)
2537 break;
2539 icode = mov_optab->handlers[(int) mode].insn_code;
2540 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2541 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2543 max_size = GET_MODE_SIZE (mode);
2546 /* The code above should have handled everything. */
2547 if (data->len != 0)
2548 abort ();
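/* Worked example (added commentary, not in the original source): with
   MOVE_MAX_PIECES == 4 and a 7-byte length, the loop above first picks
   SImode and emits one 4-byte store at offset 0, then drops max_size to
   4 and picks HImode for a 2-byte store at offset 4, and finally QImode
   for the last byte at offset 6, leaving data->len == 0.  */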
2551 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2552 with move instructions for mode MODE. GENFUN is the gen_... function
2553 to make a move insn for that mode. DATA has all the other info. */
2555 static void
2556 store_by_pieces_2 (genfun, mode, data)
2557 rtx (*genfun) PARAMS ((rtx, ...));
2558 enum machine_mode mode;
2559 struct store_by_pieces *data;
2561 unsigned int size = GET_MODE_SIZE (mode);
2562 rtx to1, cst;
2564 while (data->len >= size)
2566 if (data->reverse)
2567 data->offset -= size;
2569 if (data->autinc_to)
2570 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2571 data->offset);
2572 else
2573 to1 = adjust_address (data->to, mode, data->offset);
2575 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2576 emit_insn (gen_add2_insn (data->to_addr,
2577 GEN_INT (-(HOST_WIDE_INT) size)));
2579 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2580 emit_insn ((*genfun) (to1, cst));
2582 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2583 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2585 if (! data->reverse)
2586 data->offset += size;
2588 data->len -= size;
2592 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2593 its length in bytes. */
rtx
2596 clear_storage (object, size)
2597 rtx object;
2598 rtx size;
2600 #ifdef TARGET_MEM_FUNCTIONS
2601 static tree fn;
2602 tree call_expr, arg_list;
2603 #endif
2604 rtx retval = 0;
2605 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2606 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2608 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2609 just move a zero. Otherwise, do this a piece at a time. */
2610 if (GET_MODE (object) != BLKmode
2611 && GET_CODE (size) == CONST_INT
2612 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2613 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2614 else
2616 object = protect_from_queue (object, 1);
2617 size = protect_from_queue (size, 0);
2619 if (GET_CODE (size) == CONST_INT
2620 && MOVE_BY_PIECES_P (INTVAL (size), align))
2621 clear_by_pieces (object, INTVAL (size), align);
2622 else
2624 /* Try the most limited insn first, because there's no point
2625 including more than one in the machine description unless
2626 the more limited one has some advantage. */
2628 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2629 enum machine_mode mode;
2631 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2632 mode = GET_MODE_WIDER_MODE (mode))
2634 enum insn_code code = clrstr_optab[(int) mode];
2635 insn_operand_predicate_fn pred;
2637 if (code != CODE_FOR_nothing
2638 /* We don't need MODE to be narrower than
2639 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2640 the mode mask, as it is returned by the macro, it will
2641 definitely be less than the actual mode mask. */
2642 && ((GET_CODE (size) == CONST_INT
2643 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2644 <= (GET_MODE_MASK (mode) >> 1)))
2645 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2646 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2647 || (*pred) (object, BLKmode))
2648 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2649 || (*pred) (opalign, VOIDmode)))
2651 rtx op1;
2652 rtx last = get_last_insn ();
2653 rtx pat;
2655 op1 = convert_to_mode (mode, size, 1);
2656 pred = insn_data[(int) code].operand[1].predicate;
2657 if (pred != 0 && ! (*pred) (op1, mode))
2658 op1 = copy_to_mode_reg (mode, op1);
2660 pat = GEN_FCN ((int) code) (object, op1, opalign);
2661 if (pat)
2663 emit_insn (pat);
2664 return 0;
2666 else
2667 delete_insns_since (last);
2671 /* OBJECT or SIZE may have been passed through protect_from_queue.
2673 It is unsafe to save the value generated by protect_from_queue
2674 and reuse it later. Consider what happens if emit_queue is
2675 called before the return value from protect_from_queue is used.
2677 Expansion of the CALL_EXPR below will call emit_queue before
2678 we are finished emitting RTL for argument setup. So if we are
2679 not careful we could get the wrong value for an argument.
2681 To avoid this problem we go ahead and emit code to copy OBJECT
2682 and SIZE into new pseudos. We can then place those new pseudos
2683 into an RTL_EXPR and use them later, even after a call to
2684 emit_queue.
2686 Note this is not strictly needed for library calls since they
2687 do not call emit_queue before loading their arguments. However,
2688 we may need to have library calls call emit_queue in the future
2689 since failing to do so could cause problems for targets which
2690 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2691 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2693 #ifdef TARGET_MEM_FUNCTIONS
2694 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2695 #else
2696 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2697 TREE_UNSIGNED (integer_type_node));
2698 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2699 #endif
2701 #ifdef TARGET_MEM_FUNCTIONS
2702 /* It is incorrect to use the libcall calling conventions to call
2703 memset in this context.
2705 This could be a user call to memset and the user may wish to
2706 examine the return value from memset.
2708 For targets where libcalls and normal calls have different
2709 conventions for returning pointers, we could end up generating
2710 incorrect code.
2712 So instead of using a libcall sequence we build up a suitable
2713 CALL_EXPR and expand the call in the normal fashion. */
2714 if (fn == NULL_TREE)
2716 tree fntype;
2718 /* This was copied from except.c, I don't know if all this is
2719 necessary in this context or not. */
2720 fn = get_identifier ("memset");
2721 fntype = build_pointer_type (void_type_node);
2722 fntype = build_function_type (fntype, NULL_TREE);
2723 fn = build_decl (FUNCTION_DECL, fn, fntype);
2724 ggc_add_tree_root (&fn, 1);
2725 DECL_EXTERNAL (fn) = 1;
2726 TREE_PUBLIC (fn) = 1;
2727 DECL_ARTIFICIAL (fn) = 1;
2728 TREE_NOTHROW (fn) = 1;
2729 make_decl_rtl (fn, NULL);
2730 assemble_external (fn);
2733 /* We need to make an argument list for the function call.
2735 memset has three arguments: the first is a void * address, the
2736 second is an integer with the initialization value, and the last is a
2737 size_t byte count. */
2738 arg_list
2739 = build_tree_list (NULL_TREE,
2740 make_tree (build_pointer_type (void_type_node),
2741 object));
2742 TREE_CHAIN (arg_list)
2743 = build_tree_list (NULL_TREE,
2744 make_tree (integer_type_node, const0_rtx));
2745 TREE_CHAIN (TREE_CHAIN (arg_list))
2746 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2747 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2749 /* Now we have to build up the CALL_EXPR itself. */
2750 call_expr = build1 (ADDR_EXPR,
2751 build_pointer_type (TREE_TYPE (fn)), fn);
2752 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2753 call_expr, arg_list, NULL_TREE);
2754 TREE_SIDE_EFFECTS (call_expr) = 1;
2756 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2757 #else
2758 emit_library_call (bzero_libfunc, LCT_NORMAL,
2759 VOIDmode, 2, object, Pmode, size,
2760 TYPE_MODE (integer_type_node));
2761 #endif
2763 /* If we are initializing a readonly value, show the above call
2764 clobbered it. Otherwise, a load from it may erroneously be
2765 hoisted from a loop. */
2766 if (RTX_UNCHANGING_P (object))
2767 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2771 return retval;
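/* Illustrative sketch (added commentary, not part of the original file):
   zeroing a 32-byte BLKmode stack temporary.  Small constant sizes go
   through clear_by_pieces; larger or variable sizes fall back to the
   clrstr patterns or to the memset/bzero call emitted above.  */
#if 0
{
  rtx blk = assign_stack_temp (BLKmode, 32, 0);
  clear_storage (blk, GEN_INT (32));
}
#endif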
2774 /* Generate code to copy Y into X.
2775 Both Y and X must have the same mode, except that
2776 Y can be a constant with VOIDmode.
2777 This mode cannot be BLKmode; use emit_block_move for that.
2779 Return the last instruction emitted. */
rtx
2782 emit_move_insn (x, y)
2783 rtx x, y;
2785 enum machine_mode mode = GET_MODE (x);
2786 rtx y_cst = NULL_RTX;
2787 rtx last_insn;
2789 x = protect_from_queue (x, 1);
2790 y = protect_from_queue (y, 0);
2792 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2793 abort ();
2795 /* Never force constant_p_rtx to memory. */
2796 if (GET_CODE (y) == CONSTANT_P_RTX)
2798 else if (CONSTANT_P (y))
2800 if (optimize
2801 && FLOAT_MODE_P (GET_MODE (x))
2802 && (last_insn = compress_float_constant (x, y)))
2803 return last_insn;
2805 if (!LEGITIMATE_CONSTANT_P (y))
2807 y_cst = y;
2808 y = force_const_mem (mode, y);
2812 /* If X or Y are memory references, verify that their addresses are valid
2813 for the machine. */
2814 if (GET_CODE (x) == MEM
2815 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2816 && ! push_operand (x, GET_MODE (x)))
2817 || (flag_force_addr
2818 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2819 x = validize_mem (x);
2821 if (GET_CODE (y) == MEM
2822 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2823 || (flag_force_addr
2824 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2825 y = validize_mem (y);
2827 if (mode == BLKmode)
2828 abort ();
2830 last_insn = emit_move_insn_1 (x, y);
2832 if (y_cst && GET_CODE (x) == REG)
2833 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2835 return last_insn;
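/* Added commentary (not in the original source): when Y is a constant
   that fails LEGITIMATE_CONSTANT_P, the code above forces it into the
   constant pool and moves from memory instead; when X is a register, the
   REG_EQUAL note attached to the final insn preserves the original
   constant so later passes can still see the value being loaded.  */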
2838 /* Low level part of emit_move_insn.
2839 Called just like emit_move_insn, but assumes X and Y
2840 are basically valid. */
rtx
2843 emit_move_insn_1 (x, y)
2844 rtx x, y;
2846 enum machine_mode mode = GET_MODE (x);
2847 enum machine_mode submode;
2848 enum mode_class class = GET_MODE_CLASS (mode);
2850 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2851 abort ();
2853 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2854 return
2855 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2857 /* Expand complex moves by moving real part and imag part, if possible. */
2858 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2859 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2860 * BITS_PER_UNIT),
2861 (class == MODE_COMPLEX_INT
2862 ? MODE_INT : MODE_FLOAT),
2864 && (mov_optab->handlers[(int) submode].insn_code
2865 != CODE_FOR_nothing))
2867 /* Don't split destination if it is a stack push. */
2868 int stack = push_operand (x, GET_MODE (x));
2870 #ifdef PUSH_ROUNDING
2871 /* In case we output to the stack, but the size is smaller than the machine
2872 can push exactly, we need to use move instructions. */
2873 if (stack
2874 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2875 != GET_MODE_SIZE (submode)))
2877 rtx temp;
2878 HOST_WIDE_INT offset1, offset2;
2880 /* Do not use anti_adjust_stack, since we don't want to update
2881 stack_pointer_delta. */
2882 temp = expand_binop (Pmode,
2883 #ifdef STACK_GROWS_DOWNWARD
2884 sub_optab,
2885 #else
2886 add_optab,
2887 #endif
2888 stack_pointer_rtx,
2889 GEN_INT
2890 (PUSH_ROUNDING
2891 (GET_MODE_SIZE (GET_MODE (x)))),
2892 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2894 if (temp != stack_pointer_rtx)
2895 emit_move_insn (stack_pointer_rtx, temp);
2897 #ifdef STACK_GROWS_DOWNWARD
2898 offset1 = 0;
2899 offset2 = GET_MODE_SIZE (submode);
2900 #else
2901 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2902 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2903 + GET_MODE_SIZE (submode));
2904 #endif
2906 emit_move_insn (change_address (x, submode,
2907 gen_rtx_PLUS (Pmode,
2908 stack_pointer_rtx,
2909 GEN_INT (offset1))),
2910 gen_realpart (submode, y));
2911 emit_move_insn (change_address (x, submode,
2912 gen_rtx_PLUS (Pmode,
2913 stack_pointer_rtx,
2914 GEN_INT (offset2))),
2915 gen_imagpart (submode, y));
2917 else
2918 #endif
2919 /* If this is a stack push, push the highpart first, so it
2920 will be in the argument order.
2922 In that case, change_address is used only to convert
2923 the mode, not to change the address. */
2924 if (stack)
2926 /* Note that the real part always precedes the imag part in memory
2927 regardless of machine's endianness. */
2928 #ifdef STACK_GROWS_DOWNWARD
2929 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2930 (gen_rtx_MEM (submode, XEXP (x, 0)),
2931 gen_imagpart (submode, y)));
2932 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2933 (gen_rtx_MEM (submode, XEXP (x, 0)),
2934 gen_realpart (submode, y)));
2935 #else
2936 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2937 (gen_rtx_MEM (submode, XEXP (x, 0)),
2938 gen_realpart (submode, y)));
2939 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2940 (gen_rtx_MEM (submode, XEXP (x, 0)),
2941 gen_imagpart (submode, y)));
2942 #endif
2944 else
2946 rtx realpart_x, realpart_y;
2947 rtx imagpart_x, imagpart_y;
2949 /* If this is a complex value with each part being smaller than a
2950 word, the usual calling sequence will likely pack the pieces into
2951 a single register. Unfortunately, SUBREG of hard registers only
2952 deals in terms of words, so we have a problem converting input
2953 arguments to the CONCAT of two registers that is used elsewhere
2954 for complex values. If this is before reload, we can copy it into
2955 memory and reload. FIXME, we should see about using extract and
2956 insert on integer registers, but complex short and complex char
2957 variables should be rarely used. */
2958 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2959 && (reload_in_progress | reload_completed) == 0)
2961 int packed_dest_p
2962 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2963 int packed_src_p
2964 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2966 if (packed_dest_p || packed_src_p)
2968 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2969 ? MODE_FLOAT : MODE_INT);
2971 enum machine_mode reg_mode
2972 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2974 if (reg_mode != BLKmode)
2976 rtx mem = assign_stack_temp (reg_mode,
2977 GET_MODE_SIZE (mode), 0);
2978 rtx cmem = adjust_address (mem, mode, 0);
2980 cfun->cannot_inline
2981 = N_("function using short complex types cannot be inline");
2983 if (packed_dest_p)
2985 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2987 emit_move_insn_1 (cmem, y);
2988 return emit_move_insn_1 (sreg, mem);
2990 else
2992 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2994 emit_move_insn_1 (mem, sreg);
2995 return emit_move_insn_1 (x, cmem);
3001 realpart_x = gen_realpart (submode, x);
3002 realpart_y = gen_realpart (submode, y);
3003 imagpart_x = gen_imagpart (submode, x);
3004 imagpart_y = gen_imagpart (submode, y);
3006 /* Show the output dies here. This is necessary for SUBREGs
3007 of pseudos since we cannot track their lifetimes correctly;
3008 hard regs shouldn't appear here except as return values.
3009 We never want to emit such a clobber after reload. */
3010 if (x != y
3011 && ! (reload_in_progress || reload_completed)
3012 && (GET_CODE (realpart_x) == SUBREG
3013 || GET_CODE (imagpart_x) == SUBREG))
3014 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3016 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3017 (realpart_x, realpart_y));
3018 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3019 (imagpart_x, imagpart_y));
3022 return get_last_insn ();
3025 /* This will handle any multi-word mode that lacks a move_insn pattern.
3026 However, you will get better code if you define such patterns,
3027 even if they must turn into multiple assembler instructions. */
3028 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3030 rtx last_insn = 0;
3031 rtx seq, inner;
3032 int need_clobber;
3033 int i;
3035 #ifdef PUSH_ROUNDING
3037 /* If X is a push on the stack, do the push now and replace
3038 X with a reference to the stack pointer. */
3039 if (push_operand (x, GET_MODE (x)))
3041 rtx temp;
3042 enum rtx_code code;
3044 /* Do not use anti_adjust_stack, since we don't want to update
3045 stack_pointer_delta. */
3046 temp = expand_binop (Pmode,
3047 #ifdef STACK_GROWS_DOWNWARD
3048 sub_optab,
3049 #else
3050 add_optab,
3051 #endif
3052 stack_pointer_rtx,
3053 GEN_INT
3054 (PUSH_ROUNDING
3055 (GET_MODE_SIZE (GET_MODE (x)))),
3056 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3058 if (temp != stack_pointer_rtx)
3059 emit_move_insn (stack_pointer_rtx, temp);
3061 code = GET_CODE (XEXP (x, 0));
3063 /* Just hope that small offsets off SP are OK. */
3064 if (code == POST_INC)
3065 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3066 GEN_INT (-((HOST_WIDE_INT)
3067 GET_MODE_SIZE (GET_MODE (x)))));
3068 else if (code == POST_DEC)
3069 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3070 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3071 else
3072 temp = stack_pointer_rtx;
3074 x = change_address (x, VOIDmode, temp);
3076 #endif
3078 /* If we are in reload, see if either operand is a MEM whose address
3079 is scheduled for replacement. */
3080 if (reload_in_progress && GET_CODE (x) == MEM
3081 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3082 x = replace_equiv_address_nv (x, inner);
3083 if (reload_in_progress && GET_CODE (y) == MEM
3084 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3085 y = replace_equiv_address_nv (y, inner);
3087 start_sequence ();
3089 need_clobber = 0;
3090 for (i = 0;
3091 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3092 i++)
3094 rtx xpart = operand_subword (x, i, 1, mode);
3095 rtx ypart = operand_subword (y, i, 1, mode);
3097 /* If we can't get a part of Y, put Y into memory if it is a
3098 constant. Otherwise, force it into a register. If we still
3099 can't get a part of Y, abort. */
3100 if (ypart == 0 && CONSTANT_P (y))
3102 y = force_const_mem (mode, y);
3103 ypart = operand_subword (y, i, 1, mode);
3105 else if (ypart == 0)
3106 ypart = operand_subword_force (y, i, mode);
3108 if (xpart == 0 || ypart == 0)
3109 abort ();
3111 need_clobber |= (GET_CODE (xpart) == SUBREG);
3113 last_insn = emit_move_insn (xpart, ypart);
3116 seq = gen_sequence ();
3117 end_sequence ();
3119 /* Show the output dies here. This is necessary for SUBREGs
3120 of pseudos since we cannot track their lifetimes correctly;
3121 hard regs shouldn't appear here except as return values.
3122 We never want to emit such a clobber after reload. */
3123 if (x != y
3124 && ! (reload_in_progress || reload_completed)
3125 && need_clobber != 0)
3126 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3128 emit_insn (seq);
3130 return last_insn;
3132 else
3133 abort ();
3136 /* If Y is representable exactly in a narrower mode, and the target can
3137 perform the extension directly from constant or memory, then emit the
3138 move as an extension. */
3140 static rtx
3141 compress_float_constant (x, y)
3142 rtx x, y;
3144 enum machine_mode dstmode = GET_MODE (x);
3145 enum machine_mode orig_srcmode = GET_MODE (y);
3146 enum machine_mode srcmode;
3147 REAL_VALUE_TYPE r;
3149 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3151 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3152 srcmode != orig_srcmode;
3153 srcmode = GET_MODE_WIDER_MODE (srcmode))
3155 enum insn_code ic;
3156 rtx trunc_y, last_insn;
3158 /* Skip if the target can't extend this way. */
3159 ic = can_extend_p (dstmode, srcmode, 0);
3160 if (ic == CODE_FOR_nothing)
3161 continue;
3163 /* Skip if the narrowed value isn't exact. */
3164 if (! exact_real_truncate (srcmode, &r))
3165 continue;
3167 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3169 if (LEGITIMATE_CONSTANT_P (trunc_y))
3171 /* Skip if the target needs extra instructions to perform
3172 the extension. */
3173 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3174 continue;
3176 else if (float_extend_from_mem[dstmode][srcmode])
3177 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3178 else
3179 continue;
3181 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3182 last_insn = get_last_insn ();
3184 if (GET_CODE (x) == REG)
3185 REG_NOTES (last_insn)
3186 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3188 return last_insn;
3191 return NULL_RTX;
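/* Worked example (added commentary, not in the original source):
   storing the DFmode constant 2.0 into a register on a target where
   DFmode constants must live in memory.  2.0 truncates exactly to
   SFmode, so if the target's extendsfdf2 pattern accepts the narrower
   operand (from memory or as an immediate), the move is emitted as a
   float_extend of the SFmode constant -- a smaller constant-pool entry
   and often a cheaper load.  */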
3194 /* Pushing data onto the stack. */
3196 /* Push a block of length SIZE (perhaps variable)
3197 and return an rtx to address the beginning of the block.
3198 Note that it is not possible for the value returned to be a QUEUED.
3199 The value may be virtual_outgoing_args_rtx.
3201 EXTRA is the number of bytes of padding to push in addition to SIZE.
3202 BELOW nonzero means this padding comes at low addresses;
3203 otherwise, the padding comes at high addresses. */
rtx
3206 push_block (size, extra, below)
3207 rtx size;
3208 int extra, below;
3210 rtx temp;
3212 size = convert_modes (Pmode, ptr_mode, size, 1);
3213 if (CONSTANT_P (size))
3214 anti_adjust_stack (plus_constant (size, extra));
3215 else if (GET_CODE (size) == REG && extra == 0)
3216 anti_adjust_stack (size);
3217 else
3219 temp = copy_to_mode_reg (Pmode, size);
3220 if (extra != 0)
3221 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3222 temp, 0, OPTAB_LIB_WIDEN);
3223 anti_adjust_stack (temp);
3226 #ifndef STACK_GROWS_DOWNWARD
3227 if (0)
3228 #else
3229 if (1)
3230 #endif
3232 temp = virtual_outgoing_args_rtx;
3233 if (extra != 0 && below)
3234 temp = plus_constant (temp, extra);
3236 else
3238 if (GET_CODE (size) == CONST_INT)
3239 temp = plus_constant (virtual_outgoing_args_rtx,
3240 -INTVAL (size) - (below ? 0 : extra));
3241 else if (extra != 0 && !below)
3242 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3243 negate_rtx (Pmode, plus_constant (size, extra)));
3244 else
3245 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3246 negate_rtx (Pmode, size));
3249 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
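/* Worked example (added commentary, not in the original source): on a
   STACK_GROWS_DOWNWARD target, push_block (GEN_INT (20), 4, 0) adjusts
   the stack by 24 bytes and returns an address based on
   virtual_outgoing_args_rtx; with BELOW nonzero the 4 bytes of padding
   land at the low end instead, so the returned address is offset by 4.  */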
3252 #ifdef PUSH_ROUNDING
3254 /* Emit single push insn. */
3256 static void
3257 emit_single_push_insn (mode, x, type)
3258 rtx x;
3259 enum machine_mode mode;
3260 tree type;
3262 rtx dest_addr;
3263 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3264 rtx dest;
3265 enum insn_code icode;
3266 insn_operand_predicate_fn pred;
3268 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3269 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3270 a MEM representing the push operation at the move expander. */
3271 icode = push_optab->handlers[(int) mode].insn_code;
3272 if (icode != CODE_FOR_nothing)
3274 if (((pred = insn_data[(int) icode].operand[0].predicate)
3275 && !((*pred) (x, mode))))
3276 x = force_reg (mode, x);
3277 emit_insn (GEN_FCN (icode) (x));
3278 return;
3280 if (GET_MODE_SIZE (mode) == rounded_size)
3281 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3282 else
3284 #ifdef STACK_GROWS_DOWNWARD
3285 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3286 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3287 #else
3288 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3289 GEN_INT (rounded_size));
3290 #endif
3291 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3294 dest = gen_rtx_MEM (mode, dest_addr);
3296 if (type != 0)
3298 set_mem_attributes (dest, type, 1);
3300 if (flag_optimize_sibling_calls)
3301 /* Function incoming arguments may overlap with sibling call
3302 outgoing arguments and we cannot allow reordering of reads
3303 from function arguments with stores to outgoing arguments
3304 of sibling calls. */
3305 set_mem_alias_set (dest, 0);
3307 emit_move_insn (dest, x);
3309 #endif
3311 /* Generate code to push X onto the stack, assuming it has mode MODE and
3312 type TYPE.
3313 MODE is redundant except when X is a CONST_INT (since they don't
3314 carry mode info).
3315 SIZE is an rtx for the size of data to be copied (in bytes),
3316 needed only if X is BLKmode.
3318 ALIGN (in bits) is maximum alignment we can assume.
3320 If PARTIAL and REG are both nonzero, then copy that many of the first
3321 words of X into registers starting with REG, and push the rest of X.
3322 The amount of space pushed is decreased by PARTIAL words,
3323 rounded *down* to a multiple of PARM_BOUNDARY.
3324 REG must be a hard register in this case.
3325 If REG is zero but PARTIAL is not, take all other actions for an
3326 argument partially in registers, but do not actually load any
3327 registers.
3329 EXTRA is the amount in bytes of extra space to leave next to this arg.
3330 This is ignored if an argument block has already been allocated.
3332 On a machine that lacks real push insns, ARGS_ADDR is the address of
3333 the bottom of the argument block for this call. We use indexing off there
3334 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3335 argument block has not been preallocated.
3337 ARGS_SO_FAR is the size of args previously pushed for this call.
3339 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3340 for arguments passed in registers. If nonzero, it will be the number
3341 of bytes required. */
3343 void
3344 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3345 args_addr, args_so_far, reg_parm_stack_space,
3346 alignment_pad)
3347 rtx x;
3348 enum machine_mode mode;
3349 tree type;
3350 rtx size;
3351 unsigned int align;
3352 int partial;
3353 rtx reg;
3354 int extra;
3355 rtx args_addr;
3356 rtx args_so_far;
3357 int reg_parm_stack_space;
3358 rtx alignment_pad;
3360 rtx xinner;
3361 enum direction stack_direction
3362 #ifdef STACK_GROWS_DOWNWARD
3363 = downward;
3364 #else
3365 = upward;
3366 #endif
3368 /* Decide where to pad the argument: `downward' for below,
3369 `upward' for above, or `none' for don't pad it.
3370 Default is below for small data on big-endian machines; else above. */
3371 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3373 /* Invert direction if stack is post-decrement.
3374 FIXME: why? */
3375 if (STACK_PUSH_CODE == POST_DEC)
3376 if (where_pad != none)
3377 where_pad = (where_pad == downward ? upward : downward);
3379 xinner = x = protect_from_queue (x, 0);
3381 if (mode == BLKmode)
3383 /* Copy a block into the stack, entirely or partially. */
3385 rtx temp;
3386 int used = partial * UNITS_PER_WORD;
3387 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3388 int skip;
3390 if (size == 0)
3391 abort ();
3393 used -= offset;
3395 /* USED is now the # of bytes we need not copy to the stack
3396 because registers will take care of them. */
3398 if (partial != 0)
3399 xinner = adjust_address (xinner, BLKmode, used);
3401 /* If the partial register-part of the arg counts in its stack size,
3402 skip the part of stack space corresponding to the registers.
3403 Otherwise, start copying to the beginning of the stack space,
3404 by setting SKIP to 0. */
3405 skip = (reg_parm_stack_space == 0) ? 0 : used;
3407 #ifdef PUSH_ROUNDING
3408 /* Do it with several push insns if that doesn't take lots of insns
3409 and if there is no difficulty with push insns that skip bytes
3410 on the stack for alignment purposes. */
3411 if (args_addr == 0
3412 && PUSH_ARGS
3413 && GET_CODE (size) == CONST_INT
3414 && skip == 0
3415 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3416 /* Here we avoid the case of a structure whose weak alignment
3417 forces many pushes of a small amount of data,
3418 and such small pushes do rounding that causes trouble. */
3419 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3420 || align >= BIGGEST_ALIGNMENT
3421 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3422 == (align / BITS_PER_UNIT)))
3423 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3425 /* Push padding now if padding above and stack grows down,
3426 or if padding below and stack grows up.
3427 But if space already allocated, this has already been done. */
3428 if (extra && args_addr == 0
3429 && where_pad != none && where_pad != stack_direction)
3430 anti_adjust_stack (GEN_INT (extra));
3432 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3434 else
3435 #endif /* PUSH_ROUNDING */
3437 rtx target;
3439 /* Otherwise make space on the stack and copy the data
3440 to the address of that space. */
3442 /* Deduct words put into registers from the size we must copy. */
3443 if (partial != 0)
3445 if (GET_CODE (size) == CONST_INT)
3446 size = GEN_INT (INTVAL (size) - used);
3447 else
3448 size = expand_binop (GET_MODE (size), sub_optab, size,
3449 GEN_INT (used), NULL_RTX, 0,
3450 OPTAB_LIB_WIDEN);
3453 /* Get the address of the stack space.
3454 In this case, we do not deal with EXTRA separately.
3455 A single stack adjust will do. */
3456 if (! args_addr)
3458 temp = push_block (size, extra, where_pad == downward);
3459 extra = 0;
3461 else if (GET_CODE (args_so_far) == CONST_INT)
3462 temp = memory_address (BLKmode,
3463 plus_constant (args_addr,
3464 skip + INTVAL (args_so_far)));
3465 else
3466 temp = memory_address (BLKmode,
3467 plus_constant (gen_rtx_PLUS (Pmode,
3468 args_addr,
3469 args_so_far),
3470 skip));
3471 target = gen_rtx_MEM (BLKmode, temp);
3473 if (type != 0)
3475 set_mem_attributes (target, type, 1);
3476 /* Function incoming arguments may overlap with sibling call
3477 outgoing arguments and we cannot allow reordering of reads
3478 from function arguments with stores to outgoing arguments
3479 of sibling calls. */
3480 set_mem_alias_set (target, 0);
3482 else
3483 set_mem_align (target, align);
3485 /* TEMP is the address of the block. Copy the data there. */
3486 if (GET_CODE (size) == CONST_INT
3487 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3489 move_by_pieces (target, xinner, INTVAL (size), align);
3490 goto ret;
3492 else
3494 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3495 enum machine_mode mode;
3497 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3498 mode != VOIDmode;
3499 mode = GET_MODE_WIDER_MODE (mode))
3501 enum insn_code code = movstr_optab[(int) mode];
3502 insn_operand_predicate_fn pred;
3504 if (code != CODE_FOR_nothing
3505 && ((GET_CODE (size) == CONST_INT
3506 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3507 <= (GET_MODE_MASK (mode) >> 1)))
3508 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3509 && (!(pred = insn_data[(int) code].operand[0].predicate)
3510 || ((*pred) (target, BLKmode)))
3511 && (!(pred = insn_data[(int) code].operand[1].predicate)
3512 || ((*pred) (xinner, BLKmode)))
3513 && (!(pred = insn_data[(int) code].operand[3].predicate)
3514 || ((*pred) (opalign, VOIDmode))))
3516 rtx op2 = convert_to_mode (mode, size, 1);
3517 rtx last = get_last_insn ();
3518 rtx pat;
3520 pred = insn_data[(int) code].operand[2].predicate;
3521 if (pred != 0 && ! (*pred) (op2, mode))
3522 op2 = copy_to_mode_reg (mode, op2);
3524 pat = GEN_FCN ((int) code) (target, xinner,
3525 op2, opalign);
3526 if (pat)
3528 emit_insn (pat);
3529 goto ret;
3531 else
3532 delete_insns_since (last);
3537 if (!ACCUMULATE_OUTGOING_ARGS)
3539 /* If the source is referenced relative to the stack pointer,
3540 copy it to another register to stabilize it. We do not need
3541 to do this if we know that we won't be changing sp. */
3543 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3544 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3545 temp = copy_to_reg (temp);
3548 /* Make inhibit_defer_pop nonzero around the library call
3549 to force it to pop the bcopy-arguments right away. */
3550 NO_DEFER_POP;
3551 #ifdef TARGET_MEM_FUNCTIONS
3552 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3553 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3554 convert_to_mode (TYPE_MODE (sizetype),
3555 size, TREE_UNSIGNED (sizetype)),
3556 TYPE_MODE (sizetype));
3557 #else
3558 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3559 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3560 convert_to_mode (TYPE_MODE (integer_type_node),
3561 size,
3562 TREE_UNSIGNED (integer_type_node)),
3563 TYPE_MODE (integer_type_node));
3564 #endif
3565 OK_DEFER_POP;
3568 else if (partial > 0)
3570 /* Scalar partly in registers. */
3572 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3573 int i;
3574 int not_stack;
3575 /* # words of start of argument
3576 that we must make space for but need not store. */
3577 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3578 int args_offset = INTVAL (args_so_far);
3579 int skip;
3581 /* Push padding now if padding above and stack grows down,
3582 or if padding below and stack grows up.
3583 But if space already allocated, this has already been done. */
3584 if (extra && args_addr == 0
3585 && where_pad != none && where_pad != stack_direction)
3586 anti_adjust_stack (GEN_INT (extra));
3588 /* If we make space by pushing it, we might as well push
3589 the real data. Otherwise, we can leave OFFSET nonzero
3590 and leave the space uninitialized. */
3591 if (args_addr == 0)
3592 offset = 0;
3594 /* Now NOT_STACK gets the number of words that we don't need to
3595 allocate on the stack. */
3596 not_stack = partial - offset;
3598 /* If the partial register-part of the arg counts in its stack size,
3599 skip the part of stack space corresponding to the registers.
3600 Otherwise, start copying to the beginning of the stack space,
3601 by setting SKIP to 0. */
3602 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3604 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3605 x = validize_mem (force_const_mem (mode, x));
3607 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3608 SUBREGs of such registers are not allowed. */
3609 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3610 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3611 x = copy_to_reg (x);
3613 /* Loop over all the words allocated on the stack for this arg. */
3614 /* We can do it by words, because any scalar bigger than a word
3615 has a size a multiple of a word. */
3616 #ifndef PUSH_ARGS_REVERSED
3617 for (i = not_stack; i < size; i++)
3618 #else
3619 for (i = size - 1; i >= not_stack; i--)
3620 #endif
3621 if (i >= not_stack + offset)
3622 emit_push_insn (operand_subword_force (x, i, mode),
3623 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3624 0, args_addr,
3625 GEN_INT (args_offset + ((i - not_stack + skip)
3626 * UNITS_PER_WORD)),
3627 reg_parm_stack_space, alignment_pad);
3629 else
3631 rtx addr;
3632 rtx target = NULL_RTX;
3633 rtx dest;
3635 /* Push padding now if padding above and stack grows down,
3636 or if padding below and stack grows up.
3637 But if space already allocated, this has already been done. */
3638 if (extra && args_addr == 0
3639 && where_pad != none && where_pad != stack_direction)
3640 anti_adjust_stack (GEN_INT (extra));
3642 #ifdef PUSH_ROUNDING
3643 if (args_addr == 0 && PUSH_ARGS)
3644 emit_single_push_insn (mode, x, type);
3645 else
3646 #endif
3648 if (GET_CODE (args_so_far) == CONST_INT)
3649 addr
3650 = memory_address (mode,
3651 plus_constant (args_addr,
3652 INTVAL (args_so_far)));
3653 else
3654 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3655 args_so_far));
3656 target = addr;
3657 dest = gen_rtx_MEM (mode, addr);
3658 if (type != 0)
3660 set_mem_attributes (dest, type, 1);
3661 /* Function incoming arguments may overlap with sibling call
3662 outgoing arguments and we cannot allow reordering of reads
3663 from function arguments with stores to outgoing arguments
3664 of sibling calls. */
3665 set_mem_alias_set (dest, 0);
3668 emit_move_insn (dest, x);
3673 ret:
3674 /* If part should go in registers, copy that part
3675 into the appropriate registers. Do this now, at the end,
3676 since mem-to-mem copies above may do function calls. */
3677 if (partial > 0 && reg != 0)
3679 /* Handle calls that pass values in multiple non-contiguous locations.
3680 The Irix 6 ABI has examples of this. */
3681 if (GET_CODE (reg) == PARALLEL)
3682 emit_group_load (reg, x, -1); /* ??? size? */
3683 else
3684 move_block_to_reg (REGNO (reg), x, partial, mode);
3687 if (extra && args_addr == 0 && where_pad == stack_direction)
3688 anti_adjust_stack (GEN_INT (extra));
3690 if (alignment_pad && args_addr == 0)
3691 anti_adjust_stack (alignment_pad);
3694 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3695 operations. */
3697 static rtx
3698 get_subtarget (x)
3699 rtx x;
3701 return ((x == 0
3702 /* Only registers can be subtargets. */
3703 || GET_CODE (x) != REG
3704 /* If the register is readonly, it can't be set more than once. */
3705 || RTX_UNCHANGING_P (x)
3706 /* Don't use hard regs to avoid extending their life. */
3707 || REGNO (x) < FIRST_PSEUDO_REGISTER
3708 /* Avoid subtargets inside loops,
3709 since they hide some invariant expressions. */
3710 || preserve_subexpressions_p ())
3711 ? 0 : x);
3714 /* Expand an assignment that stores the value of FROM into TO.
3715 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3716 (This may contain a QUEUED rtx;
3717 if the value is constant, this rtx is a constant.)
3718 Otherwise, the returned value is NULL_RTX.
3720 SUGGEST_REG is no longer actually used.
3721 It used to mean, copy the value through a register
3722 and return that register, if that is possible.
3723 We now use WANT_VALUE to decide whether to do this. */
rtx
3726 expand_assignment (to, from, want_value, suggest_reg)
3727 tree to, from;
3728 int want_value;
3729 int suggest_reg ATTRIBUTE_UNUSED;
3731 rtx to_rtx = 0;
3732 rtx result;
3734 /* Don't crash if the lhs of the assignment was erroneous. */
3736 if (TREE_CODE (to) == ERROR_MARK)
3738 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3739 return want_value ? result : NULL_RTX;
3742 /* Assignment of a structure component needs special treatment
3743 if the structure component's rtx is not simply a MEM.
3744 Assignment of an array element at a constant index, and assignment of
3745 an array element in an unaligned packed structure field, has the same
3746 problem. */
3748 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3749 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3751 enum machine_mode mode1;
3752 HOST_WIDE_INT bitsize, bitpos;
3753 rtx orig_to_rtx;
3754 tree offset;
3755 int unsignedp;
3756 int volatilep = 0;
3757 tree tem;
3759 push_temp_slots ();
3760 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3761 &unsignedp, &volatilep);
3763 /* If we are going to use store_bit_field and extract_bit_field,
3764 make sure to_rtx will be safe for multiple use. */
3766 if (mode1 == VOIDmode && want_value)
3767 tem = stabilize_reference (tem);
3769 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3771 if (offset != 0)
3773 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3775 if (GET_CODE (to_rtx) != MEM)
3776 abort ();
3778 #ifdef POINTERS_EXTEND_UNSIGNED
3779 if (GET_MODE (offset_rtx) != Pmode)
3780 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3781 #else
3782 if (GET_MODE (offset_rtx) != ptr_mode)
3783 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3784 #endif
3786 /* A constant address in TO_RTX can have VOIDmode; we must not try
3787 to call force_reg for that case. Avoid that case. */
3788 if (GET_CODE (to_rtx) == MEM
3789 && GET_MODE (to_rtx) == BLKmode
3790 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3791 && bitsize > 0
3792 && (bitpos % bitsize) == 0
3793 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3794 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3796 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3797 bitpos = 0;
3800 to_rtx = offset_address (to_rtx, offset_rtx,
3801 highest_pow2_factor_for_type (TREE_TYPE (to),
3802 offset));
3805 if (GET_CODE (to_rtx) == MEM)
3807 tree old_expr = MEM_EXPR (to_rtx);
3809 /* If the field is at offset zero, we could have been given the
3810 DECL_RTX of the parent struct. Don't munge it. */
3811 to_rtx = shallow_copy_rtx (to_rtx);
3813 set_mem_attributes (to_rtx, to, 0);
3815 /* If we changed MEM_EXPR, that means we're now referencing
3816 the COMPONENT_REF, which means that MEM_OFFSET must be
3817 relative to that field. But we've not yet reflected BITPOS
3818 in TO_RTX. This will be done in store_field. Adjust for
3819 that by biasing MEM_OFFSET by -bitpos. */
3820 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3821 && (bitpos / BITS_PER_UNIT) != 0)
3822 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3823 - (bitpos / BITS_PER_UNIT)));
3826 /* Deal with volatile and readonly fields. The former is only done
3827 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3828 if (volatilep && GET_CODE (to_rtx) == MEM)
3830 if (to_rtx == orig_to_rtx)
3831 to_rtx = copy_rtx (to_rtx);
3832 MEM_VOLATILE_P (to_rtx) = 1;
3835 if (TREE_CODE (to) == COMPONENT_REF
3836 && TREE_READONLY (TREE_OPERAND (to, 1)))
3838 if (to_rtx == orig_to_rtx)
3839 to_rtx = copy_rtx (to_rtx);
3840 RTX_UNCHANGING_P (to_rtx) = 1;
3843 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3845 if (to_rtx == orig_to_rtx)
3846 to_rtx = copy_rtx (to_rtx);
3847 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3850 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3851 (want_value
3852 /* Spurious cast for HPUX compiler. */
3853 ? ((enum machine_mode)
3854 TYPE_MODE (TREE_TYPE (to)))
3855 : VOIDmode),
3856 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3858 preserve_temp_slots (result);
3859 free_temp_slots ();
3860 pop_temp_slots ();
3862 /* If the value is meaningful, convert RESULT to the proper mode.
3863 Otherwise, return nothing. */
3864 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3865 TYPE_MODE (TREE_TYPE (from)),
3866 result,
3867 TREE_UNSIGNED (TREE_TYPE (to)))
3868 : NULL_RTX);
3871 /* If the rhs is a function call and its value is not an aggregate,
3872 call the function before we start to compute the lhs.
3873 This is needed for correct code for cases such as
3874 val = setjmp (buf) on machines where reference to val
3875 requires loading up part of an address in a separate insn.
3877 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3878 since it might be a promoted variable where the zero- or sign-extension
3879 needs to be done. Handling this in the normal way is safe because no
3880 computation is done before the call. */
3881 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3882 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3883 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3884 && GET_CODE (DECL_RTL (to)) == REG))
3886 rtx value;
3888 push_temp_slots ();
3889 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3890 if (to_rtx == 0)
3891 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3893 /* Handle calls that return values in multiple non-contiguous locations.
3894 The Irix 6 ABI has examples of this. */
3895 if (GET_CODE (to_rtx) == PARALLEL)
3896 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3897 else if (GET_MODE (to_rtx) == BLKmode)
3898 emit_block_move (to_rtx, value, expr_size (from));
3899 else
3901 #ifdef POINTERS_EXTEND_UNSIGNED
3902 if (POINTER_TYPE_P (TREE_TYPE (to))
3903 && GET_MODE (to_rtx) != GET_MODE (value))
3904 value = convert_memory_address (GET_MODE (to_rtx), value);
3905 #endif
3906 emit_move_insn (to_rtx, value);
3908 preserve_temp_slots (to_rtx);
3909 free_temp_slots ();
3910 pop_temp_slots ();
3911 return want_value ? to_rtx : NULL_RTX;
3914 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3915 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3917 if (to_rtx == 0)
3918 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3920 /* Don't move directly into a return register. */
3921 if (TREE_CODE (to) == RESULT_DECL
3922 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3924 rtx temp;
3926 push_temp_slots ();
3927 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3929 if (GET_CODE (to_rtx) == PARALLEL)
3930 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3931 else
3932 emit_move_insn (to_rtx, temp);
3934 preserve_temp_slots (to_rtx);
3935 free_temp_slots ();
3936 pop_temp_slots ();
3937 return want_value ? to_rtx : NULL_RTX;
3940 /* In case we are returning the contents of an object which overlaps
3941 the place the value is being stored, use a safe function when copying
3942 a value through a pointer into a structure value return block. */
3943 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3944 && current_function_returns_struct
3945 && !current_function_returns_pcc_struct)
3947 rtx from_rtx, size;
3949 push_temp_slots ();
3950 size = expr_size (from);
3951 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3953 #ifdef TARGET_MEM_FUNCTIONS
3954 emit_library_call (memmove_libfunc, LCT_NORMAL,
3955 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3956 XEXP (from_rtx, 0), Pmode,
3957 convert_to_mode (TYPE_MODE (sizetype),
3958 size, TREE_UNSIGNED (sizetype)),
3959 TYPE_MODE (sizetype));
3960 #else
3961 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3962 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3963 XEXP (to_rtx, 0), Pmode,
3964 convert_to_mode (TYPE_MODE (integer_type_node),
3965 size, TREE_UNSIGNED (integer_type_node)),
3966 TYPE_MODE (integer_type_node));
3967 #endif
3969 preserve_temp_slots (to_rtx);
3970 free_temp_slots ();
3971 pop_temp_slots ();
3972 return want_value ? to_rtx : NULL_RTX;
3975 /* Compute FROM and store the value in the rtx we got. */
3977 push_temp_slots ();
3978 result = store_expr (from, to_rtx, want_value);
3979 preserve_temp_slots (result);
3980 free_temp_slots ();
3981 pop_temp_slots ();
3982 return want_value ? result : NULL_RTX;
3985 /* Generate code for computing expression EXP,
3986 and storing the value into TARGET.
3987 TARGET may contain a QUEUED rtx.
3989 If WANT_VALUE is nonzero, return a copy of the value
3990 not in TARGET, so that we can be sure to use the proper
3991 value in a containing expression even if TARGET has something
3992 else stored in it. If possible, we copy the value through a pseudo
3993 and return that pseudo. Or, if the value is constant, we try to
3994 return the constant. In some cases, we return a pseudo
3995 copied *from* TARGET.
3997 If the mode is BLKmode then we may return TARGET itself.
3998 It turns out that in BLKmode it doesn't cause a problem,
3999 because C has no operators that could combine two different
4000 assignments into the same BLKmode object with different values
4001 with no sequence point. Will other languages need this to
4002 be more thorough?
4004 If WANT_VALUE is 0, we return NULL, to make sure
4005 to catch quickly any cases where the caller uses the value
4006 and fails to set WANT_VALUE. */
4008 rtx
4009 store_expr (exp, target, want_value)
4010 tree exp;
4011 rtx target;
4012 int want_value;
4014 rtx temp;
4015 int dont_return_target = 0;
4016 int dont_store_target = 0;
4018 if (TREE_CODE (exp) == COMPOUND_EXPR)
4020 /* Perform first part of compound expression, then assign from second
4021 part. */
4022 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4023 emit_queue ();
4024 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4026 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4028 /* For a conditional expression, get a safe form of the target. Then
4029 test the condition, doing the appropriate assignment on either
4030 side. This avoids the creation of unnecessary temporaries.
4031 For non-BLKmode, it is more efficient not to do this. */
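/* An illustrative case (the declarations are hypothetical, not taken
   from the original sources): for a BLKmode assignment such as

       struct big dest, a, b;
       dest = flag ? a : b;

   each arm below stores directly into DEST under its own label,
   instead of first building the selected aggregate in a temporary
   and then copying that temporary into DEST.  */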
4033 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4035 emit_queue ();
4036 target = protect_from_queue (target, 1);
4038 do_pending_stack_adjust ();
4039 NO_DEFER_POP;
4040 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4041 start_cleanup_deferral ();
4042 store_expr (TREE_OPERAND (exp, 1), target, 0);
4043 end_cleanup_deferral ();
4044 emit_queue ();
4045 emit_jump_insn (gen_jump (lab2));
4046 emit_barrier ();
4047 emit_label (lab1);
4048 start_cleanup_deferral ();
4049 store_expr (TREE_OPERAND (exp, 2), target, 0);
4050 end_cleanup_deferral ();
4051 emit_queue ();
4052 emit_label (lab2);
4053 OK_DEFER_POP;
4055 return want_value ? target : NULL_RTX;
4057 else if (queued_subexp_p (target))
4058 /* If target contains a postincrement, let's not risk
4059 using it as the place to generate the rhs. */
4061 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4063 /* Expand EXP into a new pseudo. */
4064 temp = gen_reg_rtx (GET_MODE (target));
4065 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4067 else
4068 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4070 /* If target is volatile, ANSI requires accessing the value
4071 *from* the target, if it is accessed. So make that happen.
4072 In no case return the target itself. */
4073 if (! MEM_VOLATILE_P (target) && want_value)
4074 dont_return_target = 1;
4076 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4077 && GET_MODE (target) != BLKmode)
4078 /* If target is in memory and caller wants value in a register instead,
4079 arrange that. Pass TARGET as target for expand_expr so that,
4080 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4081 We know expand_expr will not use the target in that case.
4082 Don't do this if TARGET is volatile because we are supposed
4083 to write it and then read it. */
4085 temp = expand_expr (exp, target, GET_MODE (target), 0);
4086 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4088 /* If TEMP is already in the desired TARGET, only copy it from
4089 memory and don't store it there again. */
4090 if (temp == target
4091 || (rtx_equal_p (temp, target)
4092 && ! side_effects_p (temp) && ! side_effects_p (target)))
4093 dont_store_target = 1;
4094 temp = copy_to_reg (temp);
4096 dont_return_target = 1;
4098 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4099 /* If this is a scalar in a register that is stored in a wider mode
4100 than the declared mode, compute the result into its declared mode
4101 and then convert to the wider mode. Our value is the computed
4102 expression. */
4104 rtx inner_target = 0;
4106 /* If we don't want a value, we can do the conversion inside EXP,
4107 which will often result in some optimizations. Do the conversion
4108 in two steps: first change the signedness, if needed, then
4109 do the extension. But don't do this if the type of EXP is a subtype
4110 of something else since then the conversion might involve
4111 more than just converting modes. */
4112 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4113 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4115 if (TREE_UNSIGNED (TREE_TYPE (exp))
4116 != SUBREG_PROMOTED_UNSIGNED_P (target))
4117 exp = convert
4118 ((*lang_hooks.types.signed_or_unsigned_type)
4119 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4121 exp = convert ((*lang_hooks.types.type_for_mode)
4122 (GET_MODE (SUBREG_REG (target)),
4123 SUBREG_PROMOTED_UNSIGNED_P (target)),
4124 exp);
4126 inner_target = SUBREG_REG (target);
4129 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4131 /* If TEMP is a volatile MEM and we want a result value, make
4132 the access now so it gets done only once. Likewise if
4133 it contains TARGET. */
4134 if (GET_CODE (temp) == MEM && want_value
4135 && (MEM_VOLATILE_P (temp)
4136 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4137 temp = copy_to_reg (temp);
4139 /* If TEMP is a VOIDmode constant, use convert_modes to make
4140 sure that we properly convert it. */
4141 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4143 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4144 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4145 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4146 GET_MODE (target), temp,
4147 SUBREG_PROMOTED_UNSIGNED_P (target));
4150 convert_move (SUBREG_REG (target), temp,
4151 SUBREG_PROMOTED_UNSIGNED_P (target));
4153 /* If we promoted a constant, change the mode back down to match
4154 target. Otherwise, the caller might get confused by a result whose
4155 mode is larger than expected. */
4157 if (want_value && GET_MODE (temp) != GET_MODE (target))
4159 if (GET_MODE (temp) != VOIDmode)
4161 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4162 SUBREG_PROMOTED_VAR_P (temp) = 1;
4163 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4164 SUBREG_PROMOTED_UNSIGNED_P (target));
4166 else
4167 temp = convert_modes (GET_MODE (target),
4168 GET_MODE (SUBREG_REG (target)),
4169 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4172 return want_value ? temp : NULL_RTX;
4174 else
4176 temp = expand_expr (exp, target, GET_MODE (target), 0);
4177 /* Return TARGET if it's a specified hardware register.
4178 If TARGET is a volatile mem ref, either return TARGET
4179 or return a reg copied *from* TARGET; ANSI requires this.
4181 Otherwise, if TEMP is not TARGET, return TEMP
4182 if it is constant (for efficiency),
4183 or if we really want the correct value. */
4184 if (!(target && GET_CODE (target) == REG
4185 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4186 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4187 && ! rtx_equal_p (temp, target)
4188 && (CONSTANT_P (temp) || want_value))
4189 dont_return_target = 1;
4192 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4193 the same as that of TARGET, adjust the constant. This is needed, for
4194 example, in case it is a CONST_DOUBLE and we want only a word-sized
4195 value. */
4196 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4197 && TREE_CODE (exp) != ERROR_MARK
4198 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4199 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4200 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4202 /* If value was not generated in the target, store it there.
4203 Convert the value to TARGET's type first if necessary.
4204 If TEMP and TARGET compare equal according to rtx_equal_p, but
4205 one or both of them are volatile memory refs, we have to distinguish
4206 two cases:
4207 - expand_expr has used TARGET. In this case, we must not generate
4208 another copy. This can be detected by TARGET being equal according
4209 to == .
4210 - expand_expr has not used TARGET - that means that the source just
4211 happens to have the same RTX form. Since temp will have been created
4212 by expand_expr, it will compare unequal according to == .
4213 We must generate a copy in this case, to reach the correct number
4214 of volatile memory references. */
4216 if ((! rtx_equal_p (temp, target)
4217 || (temp != target && (side_effects_p (temp)
4218 || side_effects_p (target))))
4219 && TREE_CODE (exp) != ERROR_MARK
4220 && ! dont_store_target
4221 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4222 but TARGET is not a valid memory reference, TEMP will differ
4223 from TARGET although it is really the same location. */
4224 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4225 || target != DECL_RTL_IF_SET (exp)))
4227 target = protect_from_queue (target, 1);
4228 if (GET_MODE (temp) != GET_MODE (target)
4229 && GET_MODE (temp) != VOIDmode)
4231 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4232 if (dont_return_target)
4234 /* In this case, we will return TEMP,
4235 so make sure it has the proper mode.
4236 But don't forget to store the value into TARGET. */
4237 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4238 emit_move_insn (target, temp);
4240 else
4241 convert_move (target, temp, unsignedp);
4244 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4246 /* Handle copying a string constant into an array. The string
4247 constant may be shorter than the array. So copy just the string's
4248 actual length, and clear the rest. First get the size of the data
4249 type of the string, which is actually the size of the target. */
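/* An illustrative example (the declaration is hypothetical, not from
   the original sources): for

       char buf[16] = "hi";

   the STRING_CST supplies 3 bytes (counting the terminating NUL), so
   the code below block-copies those 3 bytes and then clears the
   remaining 13 bytes of the target.  */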
4250 rtx size = expr_size (exp);
4252 if (GET_CODE (size) == CONST_INT
4253 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4254 emit_block_move (target, temp, size);
4255 else
4257 /* Compute the size of the data to copy from the string. */
4258 tree copy_size
4259 = size_binop (MIN_EXPR,
4260 make_tree (sizetype, size),
4261 size_int (TREE_STRING_LENGTH (exp)));
4262 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4263 VOIDmode, 0);
4264 rtx label = 0;
4266 /* Copy that much. */
4267 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4268 emit_block_move (target, temp, copy_size_rtx);
4270 /* Figure out how much is left in TARGET that we have to clear.
4271 Do all calculations in ptr_mode. */
4272 if (GET_CODE (copy_size_rtx) == CONST_INT)
4274 size = plus_constant (size, -INTVAL (copy_size_rtx));
4275 target = adjust_address (target, BLKmode,
4276 INTVAL (copy_size_rtx));
4278 else
4280 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4281 copy_size_rtx, NULL_RTX, 0,
4282 OPTAB_LIB_WIDEN);
4284 #ifdef POINTERS_EXTEND_UNSIGNED
4285 if (GET_MODE (copy_size_rtx) != Pmode)
4286 copy_size_rtx = convert_memory_address (Pmode,
4287 copy_size_rtx);
4288 #endif
4290 target = offset_address (target, copy_size_rtx,
4291 highest_pow2_factor (copy_size));
4292 label = gen_label_rtx ();
4293 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4294 GET_MODE (size), 0, label);
4297 if (size != const0_rtx)
4298 clear_storage (target, size);
4300 if (label)
4301 emit_label (label);
4304 /* Handle calls that return values in multiple non-contiguous locations.
4305 The Irix 6 ABI has examples of this. */
4306 else if (GET_CODE (target) == PARALLEL)
4307 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4308 else if (GET_MODE (temp) == BLKmode)
4309 emit_block_move (target, temp, expr_size (exp));
4310 else
4311 emit_move_insn (target, temp);
4314 /* If we don't want a value, return NULL_RTX. */
4315 if (! want_value)
4316 return NULL_RTX;
4318 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4319 ??? The latter test doesn't seem to make sense. */
4320 else if (dont_return_target && GET_CODE (temp) != MEM)
4321 return temp;
4323 /* Return TARGET itself if it is a hard register. */
4324 else if (want_value && GET_MODE (target) != BLKmode
4325 && ! (GET_CODE (target) == REG
4326 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4327 return copy_to_reg (target);
4329 else
4330 return target;
4333 /* Return 1 if EXP just contains zeros. */
4335 static int
4336 is_zeros_p (exp)
4337 tree exp;
4339 tree elt;
4341 switch (TREE_CODE (exp))
4343 case CONVERT_EXPR:
4344 case NOP_EXPR:
4345 case NON_LVALUE_EXPR:
4346 case VIEW_CONVERT_EXPR:
4347 return is_zeros_p (TREE_OPERAND (exp, 0));
4349 case INTEGER_CST:
4350 return integer_zerop (exp);
4352 case COMPLEX_CST:
4353 return
4354 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4356 case REAL_CST:
4357 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4359 case VECTOR_CST:
4360 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4361 elt = TREE_CHAIN (elt))
4362 if (!is_zeros_p (TREE_VALUE (elt)))
4363 return 0;
4365 return 1;
4367 case CONSTRUCTOR:
4368 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4369 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4370 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4371 if (! is_zeros_p (TREE_VALUE (elt)))
4372 return 0;
4374 return 1;
4376 default:
4377 return 0;
4381 /* Return 1 if EXP contains mostly (3/4) zeros. */
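/* For illustration (an assumed example, not from the original
   sources): a CONSTRUCTOR with seven zero elements and one nonzero
   element gives zeros == 7 and elts == 8 below, and 4 * 7 >= 3 * 8
   holds, so such an initializer counts as mostly zeros.  */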
4383 static int
4384 mostly_zeros_p (exp)
4385 tree exp;
4387 if (TREE_CODE (exp) == CONSTRUCTOR)
4389 int elts = 0, zeros = 0;
4390 tree elt = CONSTRUCTOR_ELTS (exp);
4391 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4393 /* If there are no ranges of true bits, it is all zero. */
4394 return elt == NULL_TREE;
4396 for (; elt; elt = TREE_CHAIN (elt))
4398 /* We do not handle the case where the index is a RANGE_EXPR,
4399 so the statistic will be somewhat inaccurate.
4400 We do make a more accurate count in store_constructor itself,
4401 and since this function is only used for nested array elements,
4402 this should be close enough. */
4403 if (mostly_zeros_p (TREE_VALUE (elt)))
4404 zeros++;
4405 elts++;
4408 return 4 * zeros >= 3 * elts;
4411 return is_zeros_p (exp);
4414 /* Helper function for store_constructor.
4415 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4416 TYPE is the type of the CONSTRUCTOR, not the element type.
4417 CLEARED is as for store_constructor.
4418 ALIAS_SET is the alias set to use for any stores.
4420 This provides a recursive shortcut back to store_constructor when it isn't
4421 necessary to go through store_field. This is so that we can pass through
4422 the cleared field to let store_constructor know that we may not have to
4423 clear a substructure if the outer structure has already been cleared. */
4425 static void
4426 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4427 alias_set)
4428 rtx target;
4429 unsigned HOST_WIDE_INT bitsize;
4430 HOST_WIDE_INT bitpos;
4431 enum machine_mode mode;
4432 tree exp, type;
4433 int cleared;
4434 int alias_set;
4436 if (TREE_CODE (exp) == CONSTRUCTOR
4437 && bitpos % BITS_PER_UNIT == 0
4438 /* If we have a non-zero bitpos for a register target, then we just
4439 let store_field do the bitfield handling. This is unlikely to
4440 generate unnecessary clear instructions anyway. */
4441 && (bitpos == 0 || GET_CODE (target) == MEM))
4443 if (GET_CODE (target) == MEM)
4444 target
4445 = adjust_address (target,
4446 GET_MODE (target) == BLKmode
4447 || 0 != (bitpos
4448 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4449 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4452 /* Update the alias set, if required. */
4453 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4454 && MEM_ALIAS_SET (target) != 0)
4456 target = copy_rtx (target);
4457 set_mem_alias_set (target, alias_set);
4460 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4462 else
4463 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4464 alias_set);
4467 /* Store the value of constructor EXP into the rtx TARGET.
4468 TARGET is either a REG or a MEM; we know it cannot conflict, since
4469 safe_from_p has been called.
4470 CLEARED is true if TARGET is known to have been zeroed.
4471 SIZE is the number of bytes of TARGET we are allowed to modify: this
4472 may not be the same as the size of EXP if we are assigning to a field
4473 which has been packed to exclude padding bits. */
4475 static void
4476 store_constructor (exp, target, cleared, size)
4477 tree exp;
4478 rtx target;
4479 int cleared;
4480 HOST_WIDE_INT size;
4482 tree type = TREE_TYPE (exp);
4483 #ifdef WORD_REGISTER_OPERATIONS
4484 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4485 #endif
4487 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4488 || TREE_CODE (type) == QUAL_UNION_TYPE)
4490 tree elt;
4492 /* We either clear the aggregate or indicate the value is dead. */
4493 if ((TREE_CODE (type) == UNION_TYPE
4494 || TREE_CODE (type) == QUAL_UNION_TYPE)
4495 && ! cleared
4496 && ! CONSTRUCTOR_ELTS (exp))
4497 /* If the constructor is empty, clear the union. */
4499 clear_storage (target, expr_size (exp));
4500 cleared = 1;
4503 /* If we are building a static constructor into a register,
4504 set the initial value as zero so we can fold the value into
4505 a constant. But if more than one register is involved,
4506 this probably loses. */
4507 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4508 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4510 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4511 cleared = 1;
4514 /* If the constructor has fewer fields than the structure
4515 or if we are initializing the structure to mostly zeros,
4516 clear the whole structure first. Don't do this if TARGET is a
4517 register whose mode size isn't equal to SIZE since clear_storage
4518 can't handle this case. */
4519 else if (! cleared && size > 0
4520 && ((list_length (CONSTRUCTOR_ELTS (exp))
4521 != fields_length (type))
4522 || mostly_zeros_p (exp))
4523 && (GET_CODE (target) != REG
4524 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4525 == size)))
4527 clear_storage (target, GEN_INT (size));
4528 cleared = 1;
4531 if (! cleared)
4532 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4534 /* Store each element of the constructor into
4535 the corresponding field of TARGET. */
4537 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4539 tree field = TREE_PURPOSE (elt);
4540 tree value = TREE_VALUE (elt);
4541 enum machine_mode mode;
4542 HOST_WIDE_INT bitsize;
4543 HOST_WIDE_INT bitpos = 0;
4544 int unsignedp;
4545 tree offset;
4546 rtx to_rtx = target;
4548 /* Just ignore missing fields.
4549 We cleared the whole structure, above,
4550 if any fields are missing. */
4551 if (field == 0)
4552 continue;
4554 if (cleared && is_zeros_p (value))
4555 continue;
4557 if (host_integerp (DECL_SIZE (field), 1))
4558 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4559 else
4560 bitsize = -1;
4562 unsignedp = TREE_UNSIGNED (field);
4563 mode = DECL_MODE (field);
4564 if (DECL_BIT_FIELD (field))
4565 mode = VOIDmode;
4567 offset = DECL_FIELD_OFFSET (field);
4568 if (host_integerp (offset, 0)
4569 && host_integerp (bit_position (field), 0))
4571 bitpos = int_bit_position (field);
4572 offset = 0;
4574 else
4575 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4577 if (offset)
4579 rtx offset_rtx;
4581 if (contains_placeholder_p (offset))
4582 offset = build (WITH_RECORD_EXPR, sizetype,
4583 offset, make_tree (TREE_TYPE (exp), target));
4585 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4586 if (GET_CODE (to_rtx) != MEM)
4587 abort ();
4589 #ifdef POINTERS_EXTEND_UNSIGNED
4590 if (GET_MODE (offset_rtx) != Pmode)
4591 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4592 #else
4593 if (GET_MODE (offset_rtx) != ptr_mode)
4594 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4595 #endif
4597 to_rtx = offset_address (to_rtx, offset_rtx,
4598 highest_pow2_factor (offset));
4601 if (TREE_READONLY (field))
4603 if (GET_CODE (to_rtx) == MEM)
4604 to_rtx = copy_rtx (to_rtx);
4606 RTX_UNCHANGING_P (to_rtx) = 1;
4609 #ifdef WORD_REGISTER_OPERATIONS
4610 /* If this initializes a field that is smaller than a word, at the
4611 start of a word, try to widen it to a full word.
4612 This special case allows us to output C++ member function
4613 initializations in a form that the optimizers can understand. */
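/* Illustrative arithmetic (assumptions: a register target, 32-bit
   BITS_PER_WORD, big-endian bytes; not from the original sources):
   an INTEGER_CST destined for a 16-bit field at bit position 0 is
   first converted to a word-sized type and then shifted left by
   32 - 16 = 16, so storing the resulting full word puts the field's
   bits where a big-endian word access expects them.  */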
4614 if (GET_CODE (target) == REG
4615 && bitsize < BITS_PER_WORD
4616 && bitpos % BITS_PER_WORD == 0
4617 && GET_MODE_CLASS (mode) == MODE_INT
4618 && TREE_CODE (value) == INTEGER_CST
4619 && exp_size >= 0
4620 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4622 tree type = TREE_TYPE (value);
4624 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4626 type = (*lang_hooks.types.type_for_size)
4627 (BITS_PER_WORD, TREE_UNSIGNED (type));
4628 value = convert (type, value);
4631 if (BYTES_BIG_ENDIAN)
4632 value
4633 = fold (build (LSHIFT_EXPR, type, value,
4634 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4635 bitsize = BITS_PER_WORD;
4636 mode = word_mode;
4638 #endif
4640 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4641 && DECL_NONADDRESSABLE_P (field))
4643 to_rtx = copy_rtx (to_rtx);
4644 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4647 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4648 value, type, cleared,
4649 get_alias_set (TREE_TYPE (field)));
4652 else if (TREE_CODE (type) == ARRAY_TYPE
4653 || TREE_CODE (type) == VECTOR_TYPE)
4655 tree elt;
4656 int i;
4657 int need_to_clear;
4658 tree domain = TYPE_DOMAIN (type);
4659 tree elttype = TREE_TYPE (type);
4660 int const_bounds_p;
4661 HOST_WIDE_INT minelt = 0;
4662 HOST_WIDE_INT maxelt = 0;
4664 /* Vectors are like arrays, but the domain is stored via an array
4665 type indirectly. */
4666 if (TREE_CODE (type) == VECTOR_TYPE)
4668 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4669 the same field as TYPE_DOMAIN, we are not guaranteed that
4670 it always will. */
4671 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4672 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4675 const_bounds_p = (TYPE_MIN_VALUE (domain)
4676 && TYPE_MAX_VALUE (domain)
4677 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4678 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4680 /* If we have constant bounds for the range of the type, get them. */
4681 if (const_bounds_p)
4683 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4684 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4687 /* If the constructor has fewer elements than the array,
4688 clear the whole array first. Similarly if this is
4689 a static constructor of a non-BLKmode object. */
4690 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4691 need_to_clear = 1;
4692 else
4694 HOST_WIDE_INT count = 0, zero_count = 0;
4695 need_to_clear = ! const_bounds_p;
4697 /* This loop is a more accurate version of the loop in
4698 mostly_zeros_p (it handles RANGE_EXPR in an index).
4699 It is also needed to check for missing elements. */
4700 for (elt = CONSTRUCTOR_ELTS (exp);
4701 elt != NULL_TREE && ! need_to_clear;
4702 elt = TREE_CHAIN (elt))
4704 tree index = TREE_PURPOSE (elt);
4705 HOST_WIDE_INT this_node_count;
4707 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4709 tree lo_index = TREE_OPERAND (index, 0);
4710 tree hi_index = TREE_OPERAND (index, 1);
4712 if (! host_integerp (lo_index, 1)
4713 || ! host_integerp (hi_index, 1))
4715 need_to_clear = 1;
4716 break;
4719 this_node_count = (tree_low_cst (hi_index, 1)
4720 - tree_low_cst (lo_index, 1) + 1);
4722 else
4723 this_node_count = 1;
4725 count += this_node_count;
4726 if (mostly_zeros_p (TREE_VALUE (elt)))
4727 zero_count += this_node_count;
4730 /* Clear the entire array first if there are any missing elements,
4731 or if the incidence of zero elements is >= 75%. */
4732 if (! need_to_clear
4733 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4734 need_to_clear = 1;
4737 if (need_to_clear && size > 0)
4739 if (! cleared)
4741 if (REG_P (target))
4742 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4743 else
4744 clear_storage (target, GEN_INT (size));
4746 cleared = 1;
4748 else if (REG_P (target))
4749 /* Inform later passes that the old value is dead. */
4750 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4752 /* Store each element of the constructor into
4753 the corresponding element of TARGET, determined
4754 by counting the elements. */
4755 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4756 elt;
4757 elt = TREE_CHAIN (elt), i++)
4759 enum machine_mode mode;
4760 HOST_WIDE_INT bitsize;
4761 HOST_WIDE_INT bitpos;
4762 int unsignedp;
4763 tree value = TREE_VALUE (elt);
4764 tree index = TREE_PURPOSE (elt);
4765 rtx xtarget = target;
4767 if (cleared && is_zeros_p (value))
4768 continue;
4770 unsignedp = TREE_UNSIGNED (elttype);
4771 mode = TYPE_MODE (elttype);
4772 if (mode == BLKmode)
4773 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4774 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4775 : -1);
4776 else
4777 bitsize = GET_MODE_BITSIZE (mode);
4779 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4781 tree lo_index = TREE_OPERAND (index, 0);
4782 tree hi_index = TREE_OPERAND (index, 1);
4783 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4784 struct nesting *loop;
4785 HOST_WIDE_INT lo, hi, count;
4786 tree position;
4788 /* If the range is constant and "small", unroll the loop. */
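/* An illustrative sketch (relies on the GNU C range-designator
   extension; the declaration is hypothetical):

       int a[8] = { [2 ... 5] = 7 };

   gives a RANGE_EXPR index with lo_index 2 and hi_index 5.  The
   range is constant and covers only four int-sized elements, so the
   stores are emitted directly here rather than through the runtime
   loop in the else branch below.  */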
4789 if (const_bounds_p
4790 && host_integerp (lo_index, 0)
4791 && host_integerp (hi_index, 0)
4792 && (lo = tree_low_cst (lo_index, 0),
4793 hi = tree_low_cst (hi_index, 0),
4794 count = hi - lo + 1,
4795 (GET_CODE (target) != MEM
4796 || count <= 2
4797 || (host_integerp (TYPE_SIZE (elttype), 1)
4798 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4799 <= 40 * 8)))))
4801 lo -= minelt; hi -= minelt;
4802 for (; lo <= hi; lo++)
4804 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4806 if (GET_CODE (target) == MEM
4807 && !MEM_KEEP_ALIAS_SET_P (target)
4808 && TREE_CODE (type) == ARRAY_TYPE
4809 && TYPE_NONALIASED_COMPONENT (type))
4811 target = copy_rtx (target);
4812 MEM_KEEP_ALIAS_SET_P (target) = 1;
4815 store_constructor_field
4816 (target, bitsize, bitpos, mode, value, type, cleared,
4817 get_alias_set (elttype));
4820 else
4822 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4823 loop_top = gen_label_rtx ();
4824 loop_end = gen_label_rtx ();
4826 unsignedp = TREE_UNSIGNED (domain);
4828 index = build_decl (VAR_DECL, NULL_TREE, domain);
4830 index_r
4831 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4832 &unsignedp, 0));
4833 SET_DECL_RTL (index, index_r);
4834 if (TREE_CODE (value) == SAVE_EXPR
4835 && SAVE_EXPR_RTL (value) == 0)
4837 /* Make sure value gets expanded once before the
4838 loop. */
4839 expand_expr (value, const0_rtx, VOIDmode, 0);
4840 emit_queue ();
4842 store_expr (lo_index, index_r, 0);
4843 loop = expand_start_loop (0);
4845 /* Assign value to element index. */
4846 position
4847 = convert (ssizetype,
4848 fold (build (MINUS_EXPR, TREE_TYPE (index),
4849 index, TYPE_MIN_VALUE (domain))));
4850 position = size_binop (MULT_EXPR, position,
4851 convert (ssizetype,
4852 TYPE_SIZE_UNIT (elttype)));
4854 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4855 xtarget = offset_address (target, pos_rtx,
4856 highest_pow2_factor (position));
4857 xtarget = adjust_address (xtarget, mode, 0);
4858 if (TREE_CODE (value) == CONSTRUCTOR)
4859 store_constructor (value, xtarget, cleared,
4860 bitsize / BITS_PER_UNIT);
4861 else
4862 store_expr (value, xtarget, 0);
4864 expand_exit_loop_if_false (loop,
4865 build (LT_EXPR, integer_type_node,
4866 index, hi_index));
4868 expand_increment (build (PREINCREMENT_EXPR,
4869 TREE_TYPE (index),
4870 index, integer_one_node), 0, 0);
4871 expand_end_loop ();
4872 emit_label (loop_end);
4875 else if ((index != 0 && ! host_integerp (index, 0))
4876 || ! host_integerp (TYPE_SIZE (elttype), 1))
4878 tree position;
4880 if (index == 0)
4881 index = ssize_int (1);
4883 if (minelt)
4884 index = convert (ssizetype,
4885 fold (build (MINUS_EXPR, index,
4886 TYPE_MIN_VALUE (domain))));
4888 position = size_binop (MULT_EXPR, index,
4889 convert (ssizetype,
4890 TYPE_SIZE_UNIT (elttype)));
4891 xtarget = offset_address (target,
4892 expand_expr (position, 0, VOIDmode, 0),
4893 highest_pow2_factor (position));
4894 xtarget = adjust_address (xtarget, mode, 0);
4895 store_expr (value, xtarget, 0);
4897 else
4899 if (index != 0)
4900 bitpos = ((tree_low_cst (index, 0) - minelt)
4901 * tree_low_cst (TYPE_SIZE (elttype), 1));
4902 else
4903 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4905 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4906 && TREE_CODE (type) == ARRAY_TYPE
4907 && TYPE_NONALIASED_COMPONENT (type))
4909 target = copy_rtx (target);
4910 MEM_KEEP_ALIAS_SET_P (target) = 1;
4913 store_constructor_field (target, bitsize, bitpos, mode, value,
4914 type, cleared, get_alias_set (elttype));
4920 /* Set constructor assignments. */
4921 else if (TREE_CODE (type) == SET_TYPE)
4923 tree elt = CONSTRUCTOR_ELTS (exp);
4924 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4925 tree domain = TYPE_DOMAIN (type);
4926 tree domain_min, domain_max, bitlength;
4928 /* The default implementation strategy is to extract the constant
4929 parts of the constructor, use that to initialize the target,
4930 and then "or" in whatever non-constant ranges we need in addition.
4932 If a large set is all zero or all ones, it is
4933 probably better to set it using memset (if available) or bzero.
4934 Also, if a large set has just a single range, it may also be
4935 better to first clear the whole set (using bzero/memset) and
4936 then set the bits we want. */
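/* A sketch under stated assumptions (a Pascal- or CHILL-style set;
   the values are made up): for a set over 0..31 whose constructor
   names bits 1 and 3 through 5, nbits is 32, so the constant path
   below packs those bits into a single word (0x3a when
   !BYTES_BIG_ENDIAN) and stores it with one move, while any
   remaining non-constant ranges fall through to the memset or
   __setbits calls further down.  */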
4938 /* Check for all zeros. */
4939 if (elt == NULL_TREE && size > 0)
4941 if (!cleared)
4942 clear_storage (target, GEN_INT (size));
4943 return;
4946 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4947 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4948 bitlength = size_binop (PLUS_EXPR,
4949 size_diffop (domain_max, domain_min),
4950 ssize_int (1));
4952 nbits = tree_low_cst (bitlength, 1);
4954 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4955 are "complicated" (more than one range), initialize (the
4956 constant parts) by copying from a constant. */
4957 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4958 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4960 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4961 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4962 char *bit_buffer = (char *) alloca (nbits);
4963 HOST_WIDE_INT word = 0;
4964 unsigned int bit_pos = 0;
4965 unsigned int ibit = 0;
4966 unsigned int offset = 0; /* In bytes from beginning of set. */
4968 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4969 for (;;)
4971 if (bit_buffer[ibit])
4973 if (BYTES_BIG_ENDIAN)
4974 word |= (1 << (set_word_size - 1 - bit_pos));
4975 else
4976 word |= 1 << bit_pos;
4979 bit_pos++; ibit++;
4980 if (bit_pos >= set_word_size || ibit == nbits)
4982 if (word != 0 || ! cleared)
4984 rtx datum = GEN_INT (word);
4985 rtx to_rtx;
4987 /* The assumption here is that it is safe to use
4988 XEXP if the set is multi-word, but not if
4989 it's single-word. */
4990 if (GET_CODE (target) == MEM)
4991 to_rtx = adjust_address (target, mode, offset);
4992 else if (offset == 0)
4993 to_rtx = target;
4994 else
4995 abort ();
4996 emit_move_insn (to_rtx, datum);
4999 if (ibit == nbits)
5000 break;
5001 word = 0;
5002 bit_pos = 0;
5003 offset += set_word_size / BITS_PER_UNIT;
5007 else if (!cleared)
5008 /* Don't bother clearing storage if the set is all ones. */
5009 if (TREE_CHAIN (elt) != NULL_TREE
5010 || (TREE_PURPOSE (elt) == NULL_TREE
5011 ? nbits != 1
5012 : ( ! host_integerp (TREE_VALUE (elt), 0)
5013 || ! host_integerp (TREE_PURPOSE (elt), 0)
5014 || (tree_low_cst (TREE_VALUE (elt), 0)
5015 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5016 != (HOST_WIDE_INT) nbits))))
5017 clear_storage (target, expr_size (exp));
5019 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5021 /* Start of range of element or NULL. */
5022 tree startbit = TREE_PURPOSE (elt);
5023 /* End of range of element, or element value. */
5024 tree endbit = TREE_VALUE (elt);
5025 #ifdef TARGET_MEM_FUNCTIONS
5026 HOST_WIDE_INT startb, endb;
5027 #endif
5028 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5030 bitlength_rtx = expand_expr (bitlength,
5031 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5033 /* Handle non-range tuple element like [ expr ]. */
5034 if (startbit == NULL_TREE)
5036 startbit = save_expr (endbit);
5037 endbit = startbit;
5040 startbit = convert (sizetype, startbit);
5041 endbit = convert (sizetype, endbit);
5042 if (! integer_zerop (domain_min))
5044 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5045 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5047 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5048 EXPAND_CONST_ADDRESS);
5049 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5050 EXPAND_CONST_ADDRESS);
5052 if (REG_P (target))
5054 targetx
5055 = assign_temp
5056 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5057 (GET_MODE (target), 0),
5058 TYPE_QUAL_CONST)),
5059 0, 1, 1);
5060 emit_move_insn (targetx, target);
5063 else if (GET_CODE (target) == MEM)
5064 targetx = target;
5065 else
5066 abort ();
5068 #ifdef TARGET_MEM_FUNCTIONS
5069 /* Optimization: If startbit and endbit are
5070 constants divisible by BITS_PER_UNIT,
5071 call memset instead. */
5072 if (TREE_CODE (startbit) == INTEGER_CST
5073 && TREE_CODE (endbit) == INTEGER_CST
5074 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5075 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5077 emit_library_call (memset_libfunc, LCT_NORMAL,
5078 VOIDmode, 3,
5079 plus_constant (XEXP (targetx, 0),
5080 startb / BITS_PER_UNIT),
5081 Pmode,
5082 constm1_rtx, TYPE_MODE (integer_type_node),
5083 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5084 TYPE_MODE (sizetype));
5086 else
5087 #endif
5088 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5089 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5090 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5091 startbit_rtx, TYPE_MODE (sizetype),
5092 endbit_rtx, TYPE_MODE (sizetype));
5094 if (REG_P (target))
5095 emit_move_insn (target, targetx);
5099 else
5100 abort ();
5103 /* Store the value of EXP (an expression tree)
5104 into a subfield of TARGET which has mode MODE and occupies
5105 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5106 If MODE is VOIDmode, it means that we are storing into a bit-field.
5108 If VALUE_MODE is VOIDmode, return nothing in particular.
5109 UNSIGNEDP is not used in this case.
5111 Otherwise, return an rtx for the value stored. This rtx
5112 has mode VALUE_MODE if that is convenient to do.
5113 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5115 TYPE is the type of the underlying object.
5117 ALIAS_SET is the alias set for the destination. This value will
5118 (in general) be different from that for TARGET, since TARGET is a
5119 reference to the containing structure. */
5121 static rtx
5122 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5123 alias_set)
5124 rtx target;
5125 HOST_WIDE_INT bitsize;
5126 HOST_WIDE_INT bitpos;
5127 enum machine_mode mode;
5128 tree exp;
5129 enum machine_mode value_mode;
5130 int unsignedp;
5131 tree type;
5132 int alias_set;
5134 HOST_WIDE_INT width_mask = 0;
5136 if (TREE_CODE (exp) == ERROR_MARK)
5137 return const0_rtx;
5139 /* If we have nothing to store, do nothing unless the expression has
5140 side-effects. */
5141 if (bitsize == 0)
5142 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5143 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5144 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5146 /* If we are storing into an unaligned field of an aligned union that is
5147 in a register, we may have the mode of TARGET being an integer mode but
5148 MODE == BLKmode. In that case, get an aligned object whose size and
5149 alignment are the same as TARGET and store TARGET into it (we can avoid
5150 the store if the field being stored is the entire width of TARGET). Then
5151 call ourselves recursively to store the field into a BLKmode version of
5152 that object. Finally, load from the object into TARGET. This is not
5153 very efficient in general, but should only be slightly more expensive
5154 than the otherwise-required unaligned accesses. Perhaps this can be
5155 cleaned up later. */
5157 if (mode == BLKmode
5158 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5160 rtx object
5161 = assign_temp
5162 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5163 0, 1, 1);
5164 rtx blk_object = adjust_address (object, BLKmode, 0);
5166 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5167 emit_move_insn (object, target);
5169 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5170 alias_set);
5172 emit_move_insn (target, object);
5174 /* We want to return the BLKmode version of the data. */
5175 return blk_object;
5178 if (GET_CODE (target) == CONCAT)
5180 /* We're storing into a struct containing a single __complex. */
5182 if (bitpos != 0)
5183 abort ();
5184 return store_expr (exp, target, 0);
5187 /* If the structure is in a register or if the component
5188 is a bit field, we cannot use addressing to access it.
5189 Use bit-field techniques or SUBREG to store in it. */
5191 if (mode == VOIDmode
5192 || (mode != BLKmode && ! direct_store[(int) mode]
5193 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5194 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5195 || GET_CODE (target) == REG
5196 || GET_CODE (target) == SUBREG
5197 /* If the field isn't aligned enough to store as an ordinary memref,
5198 store it as a bit field. */
5199 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5200 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5201 || bitpos % GET_MODE_ALIGNMENT (mode)))
5202 /* If the RHS and field are a constant size and the size of the
5203 RHS isn't the same size as the bitfield, we must use bitfield
5204 operations. */
5205 || (bitsize >= 0
5206 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5207 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5209 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5211 /* If BITSIZE is narrower than the size of the type of EXP
5212 we will be narrowing TEMP. Normally, what's wanted are the
5213 low-order bits. However, if EXP's type is a record and this is
5214 a big-endian machine, we want the upper BITSIZE bits. */
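/* A worked example under stated assumptions (big-endian target, TEMP
   in a 32-bit integer mode; not from the original sources): storing a
   RECORD_TYPE value into a 12-bit field shifts TEMP right by
   32 - 12 = 20, so the record's data, which sits in the upper bits on
   a big-endian machine, ends up in the low-order bits that
   store_bit_field expects.  */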
5215 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5216 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5217 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5218 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5219 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5220 - bitsize),
5221 temp, 1);
5223 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5224 MODE. */
5225 if (mode != VOIDmode && mode != BLKmode
5226 && mode != TYPE_MODE (TREE_TYPE (exp)))
5227 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5229 /* If the modes of TARGET and TEMP are both BLKmode, both
5230 must be in memory and BITPOS must be aligned on a byte
5231 boundary. If so, we simply do a block copy. */
5232 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5234 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5235 || bitpos % BITS_PER_UNIT != 0)
5236 abort ();
5238 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5239 emit_block_move (target, temp,
5240 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5241 / BITS_PER_UNIT));
5243 return value_mode == VOIDmode ? const0_rtx : target;
5246 /* Store the value in the bitfield. */
5247 store_bit_field (target, bitsize, bitpos, mode, temp,
5248 int_size_in_bytes (type));
5250 if (value_mode != VOIDmode)
5252 /* The caller wants an rtx for the value.
5253 If possible, avoid refetching from the bitfield itself. */
5254 if (width_mask != 0
5255 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5257 tree count;
5258 enum machine_mode tmode;
5260 tmode = GET_MODE (temp);
5261 if (tmode == VOIDmode)
5262 tmode = value_mode;
5264 if (unsignedp)
5265 return expand_and (tmode, temp,
5266 gen_int_mode (width_mask, tmode),
5267 NULL_RTX);
5269 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5270 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5271 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5274 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5275 NULL_RTX, value_mode, VOIDmode,
5276 int_size_in_bytes (type));
5278 return const0_rtx;
5280 else
5282 rtx addr = XEXP (target, 0);
5283 rtx to_rtx = target;
5285 /* If a value is wanted, it must be the lhs,
5286 so make the address stable for multiple use. */
5288 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5289 && ! CONSTANT_ADDRESS_P (addr)
5290 /* A frame-pointer reference is already stable. */
5291 && ! (GET_CODE (addr) == PLUS
5292 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5293 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5294 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5295 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5297 /* Now build a reference to just the desired component. */
5299 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5301 if (to_rtx == target)
5302 to_rtx = copy_rtx (to_rtx);
5304 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5305 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5306 set_mem_alias_set (to_rtx, alias_set);
5308 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5312 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5313 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5314 codes and find the ultimate containing object, which we return.
5316 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5317 bit position, and *PUNSIGNEDP to the signedness of the field.
5318 If the position of the field is variable, we store a tree
5319 giving the variable offset (in units) in *POFFSET.
5320 This offset is in addition to the bit position.
5321 If the position is not variable, we store 0 in *POFFSET.
5323 If any of the extraction expressions is volatile,
5324 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5326 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5327 is a mode that can be used to access the field. In that case, *PBITSIZE
5328 is redundant.
5330 If the field describes a variable-sized object, *PMODE is set to
5331 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5332 this case, but the address of the object can be found. */
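/* An illustrative example (the declaration is hypothetical and assumes
   a 32-bit int with no unusual padding):

       struct s { int pad; unsigned f : 5; } x;

   For a reference to x.f this returns the expression for X itself,
   with *PBITSIZE == 5, *PBITPOS == 32, *POFFSET == 0, *PUNSIGNEDP
   nonzero, and *PMODE == VOIDmode because F is a bit-field.  */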
5334 tree
5335 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5336 punsignedp, pvolatilep)
5337 tree exp;
5338 HOST_WIDE_INT *pbitsize;
5339 HOST_WIDE_INT *pbitpos;
5340 tree *poffset;
5341 enum machine_mode *pmode;
5342 int *punsignedp;
5343 int *pvolatilep;
5345 tree size_tree = 0;
5346 enum machine_mode mode = VOIDmode;
5347 tree offset = size_zero_node;
5348 tree bit_offset = bitsize_zero_node;
5349 tree placeholder_ptr = 0;
5350 tree tem;
5352 /* First get the mode, signedness, and size. We do this from just the
5353 outermost expression. */
5354 if (TREE_CODE (exp) == COMPONENT_REF)
5356 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5357 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5358 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5360 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5362 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5364 size_tree = TREE_OPERAND (exp, 1);
5365 *punsignedp = TREE_UNSIGNED (exp);
5367 else
5369 mode = TYPE_MODE (TREE_TYPE (exp));
5370 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5372 if (mode == BLKmode)
5373 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5374 else
5375 *pbitsize = GET_MODE_BITSIZE (mode);
5378 if (size_tree != 0)
5380 if (! host_integerp (size_tree, 1))
5381 mode = BLKmode, *pbitsize = -1;
5382 else
5383 *pbitsize = tree_low_cst (size_tree, 1);
5386 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5387 and find the ultimate containing object. */
5388 while (1)
5390 if (TREE_CODE (exp) == BIT_FIELD_REF)
5391 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5392 else if (TREE_CODE (exp) == COMPONENT_REF)
5394 tree field = TREE_OPERAND (exp, 1);
5395 tree this_offset = DECL_FIELD_OFFSET (field);
5397 /* If this field hasn't been filled in yet, don't go
5398 past it. This should only happen when folding expressions
5399 made during type construction. */
5400 if (this_offset == 0)
5401 break;
5402 else if (! TREE_CONSTANT (this_offset)
5403 && contains_placeholder_p (this_offset))
5404 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5406 offset = size_binop (PLUS_EXPR, offset, this_offset);
5407 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5408 DECL_FIELD_BIT_OFFSET (field));
5410 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5413 else if (TREE_CODE (exp) == ARRAY_REF
5414 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5416 tree index = TREE_OPERAND (exp, 1);
5417 tree array = TREE_OPERAND (exp, 0);
5418 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5419 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5420 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5422 /* We assume all arrays have sizes that are a multiple of a byte.
5423 First subtract the lower bound, if any, in the type of the
5424 index, then convert to sizetype and multiply by the size of the
5425 array element. */
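/* A worked example (illustrative values, not from the original
   sources): for an array whose domain runs from 1 to 10 with 4-byte
   elements, a reference a[i] contributes (i - 1) * 4 bytes to OFFSET:
   the index is rebased by the lower bound in its own type, converted
   to sizetype, and then scaled by the element size.  */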
5426 if (low_bound != 0 && ! integer_zerop (low_bound))
5427 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5428 index, low_bound));
5430 /* If the index has a self-referential type, pass it to a
5431 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5432 component to one. */
5433 if (! TREE_CONSTANT (index)
5434 && contains_placeholder_p (index))
5435 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5436 if (! TREE_CONSTANT (unit_size)
5437 && contains_placeholder_p (unit_size))
5438 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5440 offset = size_binop (PLUS_EXPR, offset,
5441 size_binop (MULT_EXPR,
5442 convert (sizetype, index),
5443 unit_size));
5446 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5448 tree new = find_placeholder (exp, &placeholder_ptr);
5450 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5451 We might have been called from tree optimization where we
5452 haven't set up an object yet. */
5453 if (new == 0)
5454 break;
5455 else
5456 exp = new;
5458 continue;
5460 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5461 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5462 && ! ((TREE_CODE (exp) == NOP_EXPR
5463 || TREE_CODE (exp) == CONVERT_EXPR)
5464 && (TYPE_MODE (TREE_TYPE (exp))
5465 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5466 break;
5468 /* If any reference in the chain is volatile, the effect is volatile. */
5469 if (TREE_THIS_VOLATILE (exp))
5470 *pvolatilep = 1;
5472 exp = TREE_OPERAND (exp, 0);
5475 /* If OFFSET is constant, see if we can return the whole thing as a
5476 constant bit position. Otherwise, split it up. */
5477 if (host_integerp (offset, 0)
5478 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5479 bitsize_unit_node))
5480 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5481 && host_integerp (tem, 0))
5482 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5483 else
5484 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5486 *pmode = mode;
5487 return exp;
5490 /* Return 1 if T is an expression that get_inner_reference handles. */
5492 int
5493 handled_component_p (t)
5494 tree t;
5496 switch (TREE_CODE (t))
5498 case BIT_FIELD_REF:
5499 case COMPONENT_REF:
5500 case ARRAY_REF:
5501 case ARRAY_RANGE_REF:
5502 case NON_LVALUE_EXPR:
5503 case VIEW_CONVERT_EXPR:
5504 return 1;
5506 case NOP_EXPR:
5507 case CONVERT_EXPR:
5508 return (TYPE_MODE (TREE_TYPE (t))
5509 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5511 default:
5512 return 0;
5516 /* Given an rtx VALUE that may contain additions and multiplications, return
5517 an equivalent value that just refers to a register, memory, or constant.
5518 This is done by generating instructions to perform the arithmetic and
5519 returning a pseudo-register containing the value.
5521 The returned value may be a REG, SUBREG, MEM or constant. */
5523 rtx
5524 force_operand (value, target)
5525 rtx value, target;
5527 rtx op1, op2;
5528 /* Use subtarget as the target for operand 0 of a binary operation. */
5529 rtx subtarget = get_subtarget (target);
5530 enum rtx_code code = GET_CODE (value);
5532 /* Check for a PIC address load. */
5533 if ((code == PLUS || code == MINUS)
5534 && XEXP (value, 0) == pic_offset_table_rtx
5535 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5536 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5537 || GET_CODE (XEXP (value, 1)) == CONST))
5539 if (!subtarget)
5540 subtarget = gen_reg_rtx (GET_MODE (value));
5541 emit_move_insn (subtarget, value);
5542 return subtarget;
5545 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5547 if (!target)
5548 target = gen_reg_rtx (GET_MODE (value));
5549 convert_move (target, force_operand (XEXP (value, 0), NULL),
5550 code == ZERO_EXTEND);
5551 return target;
5554 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5556 op2 = XEXP (value, 1);
5557 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5558 subtarget = 0;
5559 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5561 code = PLUS;
5562 op2 = negate_rtx (GET_MODE (value), op2);
5565 /* Check for an addition with OP2 a constant integer and our first
5566 operand a PLUS of a virtual register and something else. In that
5567 case, we want to emit the sum of the virtual register and the
5568 constant first and then add the other value. This allows virtual
5569 register instantiation to simply modify the constant rather than
5570 creating another one around this addition. */
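/* An illustrative sketch (informal RTL; the pseudo-register number is
   made up): given

       (plus (plus (reg virtual-stack-vars) (reg 60)) (const_int 8))

   we first emit virtual-stack-vars + 8, which instantiation can later
   fold into a plain frame offset, and then add (reg 60) to that
   partial sum.  */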
5571 if (code == PLUS && GET_CODE (op2) == CONST_INT
5572 && GET_CODE (XEXP (value, 0)) == PLUS
5573 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5574 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5575 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5577 rtx temp = expand_simple_binop (GET_MODE (value), code,
5578 XEXP (XEXP (value, 0), 0), op2,
5579 subtarget, 0, OPTAB_LIB_WIDEN);
5580 return expand_simple_binop (GET_MODE (value), code, temp,
5581 force_operand (XEXP (XEXP (value,
5582 0), 1), 0),
5583 target, 0, OPTAB_LIB_WIDEN);
5586 op1 = force_operand (XEXP (value, 0), subtarget);
5587 op2 = force_operand (op2, NULL_RTX);
5588 switch (code)
5590 case MULT:
5591 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5592 case DIV:
5593 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5594 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5595 target, 1, OPTAB_LIB_WIDEN);
5596 else
5597 return expand_divmod (0,
5598 FLOAT_MODE_P (GET_MODE (value))
5599 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5600 GET_MODE (value), op1, op2, target, 0);
5601 break;
5602 case MOD:
5603 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5604 target, 0);
5605 break;
5606 case UDIV:
5607 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5608 target, 1);
5609 break;
5610 case UMOD:
5611 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5612 target, 1);
5613 break;
5614 case ASHIFTRT:
5615 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5616 target, 0, OPTAB_LIB_WIDEN);
5617 break;
5618 default:
5619 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5620 target, 1, OPTAB_LIB_WIDEN);
5623 if (GET_RTX_CLASS (code) == '1')
5625 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5626 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5629 #ifdef INSN_SCHEDULING
5630 /* On machines that have insn scheduling, we want all memory references to be
5631 explicit, so we need to deal with such paradoxical SUBREGs. */
5632 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5633 && (GET_MODE_SIZE (GET_MODE (value))
5634 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5635 value
5636 = simplify_gen_subreg (GET_MODE (value),
5637 force_reg (GET_MODE (SUBREG_REG (value)),
5638 force_operand (SUBREG_REG (value),
5639 NULL_RTX)),
5640 GET_MODE (SUBREG_REG (value)),
5641 SUBREG_BYTE (value));
5642 #endif
5644 return value;
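/* A minimal usage sketch (illustrative only; the rtx shown is hypothetical):
   expand_expr called with EXPAND_SUM may hand back a nest such as

     (plus (mult (reg 118) (const_int 4)) (symbol_ref "a"))

   and a caller that needs a real operand can write

     rtx op = force_operand (value, NULL_RTX);

   which emits the multiply and the add and returns a register holding the
   sum (or VALUE itself when it is already valid as an operand). */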
5647 /* Subroutine of expand_expr: return nonzero iff there is no way that
5648 EXP can reference X, which is being modified. TOP_P is nonzero if this
5649 call is going to be used to determine whether we need a temporary
5650 for EXP, as opposed to a recursive call to this function.
5652 It is always safe for this routine to return zero since it merely
5653 searches for optimization opportunities. */
5656 safe_from_p (x, exp, top_p)
5657 rtx x;
5658 tree exp;
5659 int top_p;
5661 rtx exp_rtl = 0;
5662 int i, nops;
5663 static tree save_expr_list;
5665 if (x == 0
5666 /* If EXP has varying size, we MUST use a target since we currently
5667 have no way of allocating temporaries of variable size
5668 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5669 So we assume here that something at a higher level has prevented a
5670 clash. This is somewhat bogus, but the best we can do. Only
5671 do this when X is BLKmode and when we are at the top level. */
5672 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5673 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5674 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5675 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5676 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5677 != INTEGER_CST)
5678 && GET_MODE (x) == BLKmode)
5679 /* If X is in the outgoing argument area, it is always safe. */
5680 || (GET_CODE (x) == MEM
5681 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5682 || (GET_CODE (XEXP (x, 0)) == PLUS
5683 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5684 return 1;
5686 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5687 find the underlying pseudo. */
5688 if (GET_CODE (x) == SUBREG)
5690 x = SUBREG_REG (x);
5691 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5692 return 0;
5695 /* A SAVE_EXPR might appear many times in the expression passed to the
5696 top-level safe_from_p call, and if it has a complex subexpression,
5697 examining it multiple times could result in a combinatorial explosion.
5698 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5699 with optimization took about 28 minutes to compile -- even though it was
5700 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5701 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5702 we have processed. Note that the only test of top_p was above. */
5704 if (top_p)
5706 int rtn;
5707 tree t;
5709 save_expr_list = 0;
5711 rtn = safe_from_p (x, exp, 0);
5713 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5714 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5716 return rtn;
5719 /* Now look at our tree code and possibly recurse. */
5720 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5722 case 'd':
5723 exp_rtl = DECL_RTL_IF_SET (exp);
5724 break;
5726 case 'c':
5727 return 1;
5729 case 'x':
5730 if (TREE_CODE (exp) == TREE_LIST)
5731 return ((TREE_VALUE (exp) == 0
5732 || safe_from_p (x, TREE_VALUE (exp), 0))
5733 && (TREE_CHAIN (exp) == 0
5734 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5735 else if (TREE_CODE (exp) == ERROR_MARK)
5736 return 1; /* An already-visited SAVE_EXPR? */
5737 else
5738 return 0;
5740 case '1':
5741 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5743 case '2':
5744 case '<':
5745 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5746 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5748 case 'e':
5749 case 'r':
5750 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5751 the expression. If it is set, we conflict iff we are that rtx or
5752 both are in memory. Otherwise, we check all operands of the
5753 expression recursively. */
5755 switch (TREE_CODE (exp))
5757 case ADDR_EXPR:
5758 /* If the operand is static or we are static, we can't conflict.
5759 Likewise if we don't conflict with the operand at all. */
5760 if (staticp (TREE_OPERAND (exp, 0))
5761 || TREE_STATIC (exp)
5762 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5763 return 1;
5765 /* Otherwise, the only way this can conflict is if we are taking
5766 the address of a DECL whose address is part of X, which is
5767 very rare. */
5768 exp = TREE_OPERAND (exp, 0);
5769 if (DECL_P (exp))
5771 if (!DECL_RTL_SET_P (exp)
5772 || GET_CODE (DECL_RTL (exp)) != MEM)
5773 return 0;
5774 else
5775 exp_rtl = XEXP (DECL_RTL (exp), 0);
5777 break;
5779 case INDIRECT_REF:
5780 if (GET_CODE (x) == MEM
5781 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5782 get_alias_set (exp)))
5783 return 0;
5784 break;
5786 case CALL_EXPR:
5787 /* Assume that the call will clobber all hard registers and
5788 all of memory. */
5789 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5790 || GET_CODE (x) == MEM)
5791 return 0;
5792 break;
5794 case RTL_EXPR:
5795 /* If a sequence exists, we would have to scan every instruction
5796 in the sequence to see if it was safe. This is probably not
5797 worthwhile. */
5798 if (RTL_EXPR_SEQUENCE (exp))
5799 return 0;
5801 exp_rtl = RTL_EXPR_RTL (exp);
5802 break;
5804 case WITH_CLEANUP_EXPR:
5805 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5806 break;
5808 case CLEANUP_POINT_EXPR:
5809 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5811 case SAVE_EXPR:
5812 exp_rtl = SAVE_EXPR_RTL (exp);
5813 if (exp_rtl)
5814 break;
5816 /* If we've already scanned this, don't do it again. Otherwise,
5817 show we've scanned it and record for clearing the flag if we're
5818 going on. */
5819 if (TREE_PRIVATE (exp))
5820 return 1;
5822 TREE_PRIVATE (exp) = 1;
5823 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5825 TREE_PRIVATE (exp) = 0;
5826 return 0;
5829 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5830 return 1;
5832 case BIND_EXPR:
5833 /* The only operand we look at is operand 1. The rest aren't
5834 part of the expression. */
5835 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5837 case METHOD_CALL_EXPR:
5838 /* This takes an rtx argument, but shouldn't appear here. */
5839 abort ();
5841 default:
5842 break;
5845 /* If we have an rtx, we do not need to scan our operands. */
5846 if (exp_rtl)
5847 break;
5849 nops = first_rtl_op (TREE_CODE (exp));
5850 for (i = 0; i < nops; i++)
5851 if (TREE_OPERAND (exp, i) != 0
5852 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5853 return 0;
5855 /* If this is a language-specific tree code, it may require
5856 special handling. */
5857 if ((unsigned int) TREE_CODE (exp)
5858 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5859 && !(*lang_hooks.safe_from_p) (x, exp))
5860 return 0;
5863 /* If we have an rtl, find any enclosed object. Then see if we conflict
5864 with it. */
5865 if (exp_rtl)
5867 if (GET_CODE (exp_rtl) == SUBREG)
5869 exp_rtl = SUBREG_REG (exp_rtl);
5870 if (GET_CODE (exp_rtl) == REG
5871 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5872 return 0;
5875 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5876 are memory and they conflict. */
5877 return ! (rtx_equal_p (x, exp_rtl)
5878 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5879 && true_dependence (exp_rtl, VOIDmode, x,
5880 rtx_addr_varies_p)));
5883 /* If we reach here, it is safe. */
5884 return 1;
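/* Illustrative examples of how the cases above combine: calling
   safe_from_p with X a hard register (say hard_frame_pointer_rtx) and EXP
   a CALL_EXPR yields 0, since a call is assumed to clobber all hard
   registers and memory, while a constant EXP always yields 1.  Callers
   such as the CONSTRUCTOR handling in expand_expr use this test to decide
   whether an existing TARGET may be stored into directly. */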
5887 /* Subroutine of expand_expr: return rtx if EXP is a
5888 variable or parameter; else return 0. */
5890 static rtx
5891 var_rtx (exp)
5892 tree exp;
5894 STRIP_NOPS (exp);
5895 switch (TREE_CODE (exp))
5897 case PARM_DECL:
5898 case VAR_DECL:
5899 return DECL_RTL (exp);
5900 default:
5901 return 0;
5905 #ifdef MAX_INTEGER_COMPUTATION_MODE
5907 void
5908 check_max_integer_computation_mode (exp)
5909 tree exp;
5911 enum tree_code code;
5912 enum machine_mode mode;
5914 /* Strip any NOPs that don't change the mode. */
5915 STRIP_NOPS (exp);
5916 code = TREE_CODE (exp);
5918 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5919 if (code == NOP_EXPR
5920 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5921 return;
5923 /* First check the type of the overall operation. We need only look at
5924 unary, binary and relational operations. */
5925 if (TREE_CODE_CLASS (code) == '1'
5926 || TREE_CODE_CLASS (code) == '2'
5927 || TREE_CODE_CLASS (code) == '<')
5929 mode = TYPE_MODE (TREE_TYPE (exp));
5930 if (GET_MODE_CLASS (mode) == MODE_INT
5931 && mode > MAX_INTEGER_COMPUTATION_MODE)
5932 internal_error ("unsupported wide integer operation");
5935 /* Check operand of a unary op. */
5936 if (TREE_CODE_CLASS (code) == '1')
5938 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5939 if (GET_MODE_CLASS (mode) == MODE_INT
5940 && mode > MAX_INTEGER_COMPUTATION_MODE)
5941 internal_error ("unsupported wide integer operation");
5944 /* Check operands of a binary/comparison op. */
5945 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5947 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5948 if (GET_MODE_CLASS (mode) == MODE_INT
5949 && mode > MAX_INTEGER_COMPUTATION_MODE)
5950 internal_error ("unsupported wide integer operation");
5952 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5953 if (GET_MODE_CLASS (mode) == MODE_INT
5954 && mode > MAX_INTEGER_COMPUTATION_MODE)
5955 internal_error ("unsupported wide integer operation");
5958 #endif
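/* For example (illustrative only): on a target defining
   MAX_INTEGER_COMPUTATION_MODE as SImode, an expression whose type or
   operand types are DImode integers trips the internal_error calls
   above. */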
5960 /* Return the highest power of two that EXP is known to be a multiple of.
5961 This is used in updating alignment of MEMs in array references. */
5963 static HOST_WIDE_INT
5964 highest_pow2_factor (exp)
5965 tree exp;
5967 HOST_WIDE_INT c0, c1;
5969 switch (TREE_CODE (exp))
5971 case INTEGER_CST:
5972 /* We can find the lowest bit that's a one. If the low
5973 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5974 We need to handle this case since we can find it in a COND_EXPR,
5975 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5976 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5977 later ICE. */
5978 if (TREE_CONSTANT_OVERFLOW (exp))
5979 return BIGGEST_ALIGNMENT;
5980 else
5982 /* Note: tree_low_cst is intentionally not used here;
5983 we don't care about the upper bits. */
5984 c0 = TREE_INT_CST_LOW (exp);
5985 c0 &= -c0;
5986 return c0 ? c0 : BIGGEST_ALIGNMENT;
5988 break;
5990 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5991 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5992 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5993 return MIN (c0, c1);
5995 case MULT_EXPR:
5996 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5997 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5998 return c0 * c1;
6000 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6001 case CEIL_DIV_EXPR:
6002 if (integer_pow2p (TREE_OPERAND (exp, 1))
6003 && host_integerp (TREE_OPERAND (exp, 1), 1))
6005 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6006 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6007 return MAX (1, c0 / c1);
6009 break;
6011 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6012 case SAVE_EXPR: case WITH_RECORD_EXPR:
6013 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6015 case COMPOUND_EXPR:
6016 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6018 case COND_EXPR:
6019 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6020 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6021 return MIN (c0, c1);
6023 default:
6024 break;
6027 return 1;
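/* Worked example (illustrative only): for an expression such as
   i * 4 + 12, the MULT_EXPR contributes 1 * 4 = 4 (a plain variable has
   factor 1), the constant 12 contributes 12 & -12 = 4, and the PLUS_EXPR
   takes MIN (4, 4), so the result is 4. */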
6030 /* Similar, except that it is known that the expression must be a multiple
6031 of the alignment of TYPE. */
6033 static HOST_WIDE_INT
6034 highest_pow2_factor_for_type (type, exp)
6035 tree type;
6036 tree exp;
6038 HOST_WIDE_INT type_align, factor;
6040 factor = highest_pow2_factor (exp);
6041 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6042 return MAX (factor, type_align);
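/* For instance (illustrative only): if TYPE is aligned to 8 bytes and
   highest_pow2_factor returns 2 for EXP, the result is MAX (2, 8) = 8,
   since the expression is independently known to be a multiple of the
   type's alignment. */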
6045 /* Return an object on the placeholder list that matches EXP, a
6046 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6047 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6048 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6049 points to a location holding a starting position in the placeholder list
6050 (zero means the start of the list); on success, a pointer to the
6051 placeholder list entry at which the object was found is stored there. */
6053 tree
6054 find_placeholder (exp, plist)
6055 tree exp;
6056 tree *plist;
6058 tree type = TREE_TYPE (exp);
6059 tree placeholder_expr;
6061 for (placeholder_expr
6062 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6063 placeholder_expr != 0;
6064 placeholder_expr = TREE_CHAIN (placeholder_expr))
6066 tree need_type = TYPE_MAIN_VARIANT (type);
6067 tree elt;
6069 /* Find the outermost reference that is of the type we want. If none,
6070 see if any object has a type that is a pointer to the type we
6071 want. */
6072 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6073 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6074 || TREE_CODE (elt) == COND_EXPR)
6075 ? TREE_OPERAND (elt, 1)
6076 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6077 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6078 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6079 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6080 ? TREE_OPERAND (elt, 0) : 0))
6081 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6083 if (plist)
6084 *plist = placeholder_expr;
6085 return elt;
6088 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6089 elt
6090 = ((TREE_CODE (elt) == COMPOUND_EXPR
6091 || TREE_CODE (elt) == COND_EXPR)
6092 ? TREE_OPERAND (elt, 1)
6093 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6094 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6095 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6096 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6097 ? TREE_OPERAND (elt, 0) : 0))
6098 if (POINTER_TYPE_P (TREE_TYPE (elt))
6099 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6100 == need_type))
6102 if (plist)
6103 *plist = placeholder_expr;
6104 return build1 (INDIRECT_REF, need_type, elt);
6108 return 0;
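/* A sketch of the intended use (illustrative only): the WITH_RECORD_EXPR
   case of expand_expr below pushes an object onto placeholder_list; when
   a PLACEHOLDER_EXPR of that object's type is later expanded, this
   routine walks the list and returns the matching object, or an
   INDIRECT_REF of it when only a pointer to the wanted type is found. */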
6111 /* expand_expr: generate code for computing expression EXP.
6112 An rtx for the computed value is returned. The value is never null.
6113 In the case of a void EXP, const0_rtx is returned.
6115 The value may be stored in TARGET if TARGET is nonzero.
6116 TARGET is just a suggestion; callers must assume that
6117 the rtx returned may not be the same as TARGET.
6119 If TARGET is CONST0_RTX, it means that the value will be ignored.
6121 If TMODE is not VOIDmode, it suggests generating the
6122 result in mode TMODE. But this is done only when convenient.
6123 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6124 TMODE is just a suggestion; callers must assume that
6125 the rtx returned may not have mode TMODE.
6127 Note that TARGET may have neither TMODE nor MODE. In that case, it
6128 probably will not be used.
6130 If MODIFIER is EXPAND_SUM then when EXP is an addition
6131 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6132 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6133 products as above, or REG or MEM, or constant.
6134 Ordinarily in such cases we would output mul or add instructions
6135 and then return a pseudo reg containing the sum.
6137 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6138 it also marks a label as absolutely required (it can't be dead).
6139 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6140 This is used for outputting expressions used in initializers.
6142 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6143 with a constant address even if that address is not normally legitimate.
6144 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
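/* A minimal usage sketch (illustrative only):

     rtx r = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   expands EXP with no suggested target or mode, while

     expand_expr (exp, const0_rtx, VOIDmode, 0);

   evaluates EXP only for its side effects, as described above. */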
6147 expand_expr (exp, target, tmode, modifier)
6148 tree exp;
6149 rtx target;
6150 enum machine_mode tmode;
6151 enum expand_modifier modifier;
6153 rtx op0, op1, temp;
6154 tree type = TREE_TYPE (exp);
6155 int unsignedp = TREE_UNSIGNED (type);
6156 enum machine_mode mode;
6157 enum tree_code code = TREE_CODE (exp);
6158 optab this_optab;
6159 rtx subtarget, original_target;
6160 int ignore;
6161 tree context;
6163 /* Handle ERROR_MARK before anybody tries to access its type. */
6164 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6166 op0 = CONST0_RTX (tmode);
6167 if (op0 != 0)
6168 return op0;
6169 return const0_rtx;
6172 mode = TYPE_MODE (type);
6173 /* Use subtarget as the target for operand 0 of a binary operation. */
6174 subtarget = get_subtarget (target);
6175 original_target = target;
6176 ignore = (target == const0_rtx
6177 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6178 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6179 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6180 && TREE_CODE (type) == VOID_TYPE));
6182 /* If we are going to ignore this result, we need only do something
6183 if there is a side-effect somewhere in the expression. If there
6184 is, short-circuit the most common cases here. Note that we must
6185 not call expand_expr with anything but const0_rtx in case this
6186 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6188 if (ignore)
6190 if (! TREE_SIDE_EFFECTS (exp))
6191 return const0_rtx;
6193 /* Ensure we reference a volatile object even if value is ignored, but
6194 don't do this if all we are doing is taking its address. */
6195 if (TREE_THIS_VOLATILE (exp)
6196 && TREE_CODE (exp) != FUNCTION_DECL
6197 && mode != VOIDmode && mode != BLKmode
6198 && modifier != EXPAND_CONST_ADDRESS)
6200 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6201 if (GET_CODE (temp) == MEM)
6202 temp = copy_to_reg (temp);
6203 return const0_rtx;
6206 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6207 || code == INDIRECT_REF || code == BUFFER_REF)
6208 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6209 modifier);
6211 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6212 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6214 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6215 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6216 return const0_rtx;
6218 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6219 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6220 /* If the second operand has no side effects, just evaluate
6221 the first. */
6222 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6223 modifier);
6224 else if (code == BIT_FIELD_REF)
6226 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6227 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6228 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6229 return const0_rtx;
6232 target = 0;
6235 #ifdef MAX_INTEGER_COMPUTATION_MODE
6236 /* Only check stuff here if the mode we want is different from the mode
6237 of the expression; if it's the same, check_max_integer_computation_mode
6238 will handle it. Do we really need to check this stuff at all? */
6240 if (target
6241 && GET_MODE (target) != mode
6242 && TREE_CODE (exp) != INTEGER_CST
6243 && TREE_CODE (exp) != PARM_DECL
6244 && TREE_CODE (exp) != ARRAY_REF
6245 && TREE_CODE (exp) != ARRAY_RANGE_REF
6246 && TREE_CODE (exp) != COMPONENT_REF
6247 && TREE_CODE (exp) != BIT_FIELD_REF
6248 && TREE_CODE (exp) != INDIRECT_REF
6249 && TREE_CODE (exp) != CALL_EXPR
6250 && TREE_CODE (exp) != VAR_DECL
6251 && TREE_CODE (exp) != RTL_EXPR)
6253 enum machine_mode mode = GET_MODE (target);
6255 if (GET_MODE_CLASS (mode) == MODE_INT
6256 && mode > MAX_INTEGER_COMPUTATION_MODE)
6257 internal_error ("unsupported wide integer operation");
6260 if (tmode != mode
6261 && TREE_CODE (exp) != INTEGER_CST
6262 && TREE_CODE (exp) != PARM_DECL
6263 && TREE_CODE (exp) != ARRAY_REF
6264 && TREE_CODE (exp) != ARRAY_RANGE_REF
6265 && TREE_CODE (exp) != COMPONENT_REF
6266 && TREE_CODE (exp) != BIT_FIELD_REF
6267 && TREE_CODE (exp) != INDIRECT_REF
6268 && TREE_CODE (exp) != VAR_DECL
6269 && TREE_CODE (exp) != CALL_EXPR
6270 && TREE_CODE (exp) != RTL_EXPR
6271 && GET_MODE_CLASS (tmode) == MODE_INT
6272 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6273 internal_error ("unsupported wide integer operation");
6275 check_max_integer_computation_mode (exp);
6276 #endif
6278 /* If we will do cse, generate all results into pseudo registers
6279 since 1) that allows cse to find more things
6280 and 2) otherwise cse could produce an insn the machine
6281 cannot support. An exception is a CONSTRUCTOR into a multi-word
6282 MEM: that's much more likely to be most efficient into the MEM. */
6284 if (! cse_not_expected && mode != BLKmode && target
6285 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6286 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6287 target = subtarget;
6289 switch (code)
6291 case LABEL_DECL:
6293 tree function = decl_function_context (exp);
6294 /* Handle using a label in a containing function. */
6295 if (function != current_function_decl
6296 && function != inline_function_decl && function != 0)
6298 struct function *p = find_function_data (function);
6299 p->expr->x_forced_labels
6300 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6301 p->expr->x_forced_labels);
6303 else
6305 if (modifier == EXPAND_INITIALIZER)
6306 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6307 label_rtx (exp),
6308 forced_labels);
6311 temp = gen_rtx_MEM (FUNCTION_MODE,
6312 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6313 if (function != current_function_decl
6314 && function != inline_function_decl && function != 0)
6315 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6316 return temp;
6319 case PARM_DECL:
6320 if (DECL_RTL (exp) == 0)
6322 error_with_decl (exp, "prior parameter's size depends on `%s'");
6323 return CONST0_RTX (mode);
6326 /* ... fall through ... */
6328 case VAR_DECL:
6329 /* If a static var's type was incomplete when the decl was written,
6330 but the type is complete now, lay out the decl now. */
6331 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6332 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6334 rtx value = DECL_RTL_IF_SET (exp);
6336 layout_decl (exp, 0);
6338 /* If the RTL was already set, update its mode and memory
6339 attributes. */
6340 if (value != 0)
6342 PUT_MODE (value, DECL_MODE (exp));
6343 SET_DECL_RTL (exp, 0);
6344 set_mem_attributes (value, exp, 1);
6345 SET_DECL_RTL (exp, value);
6349 /* ... fall through ... */
6351 case FUNCTION_DECL:
6352 case RESULT_DECL:
6353 if (DECL_RTL (exp) == 0)
6354 abort ();
6356 /* Ensure the variable is marked as used even if it doesn't go through
6357 a parser. If it hasn't been used yet, write out an external
6358 definition. */
6359 if (! TREE_USED (exp))
6361 assemble_external (exp);
6362 TREE_USED (exp) = 1;
6365 /* Show we haven't gotten RTL for this yet. */
6366 temp = 0;
6368 /* Handle variables inherited from containing functions. */
6369 context = decl_function_context (exp);
6371 /* We treat inline_function_decl as an alias for the current function
6372 because that is the inline function whose vars, types, etc.
6373 are being merged into the current function.
6374 See expand_inline_function. */
6376 if (context != 0 && context != current_function_decl
6377 && context != inline_function_decl
6378 /* If var is static, we don't need a static chain to access it. */
6379 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6380 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6382 rtx addr;
6384 /* Mark as non-local and addressable. */
6385 DECL_NONLOCAL (exp) = 1;
6386 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6387 abort ();
6388 (*lang_hooks.mark_addressable) (exp);
6389 if (GET_CODE (DECL_RTL (exp)) != MEM)
6390 abort ();
6391 addr = XEXP (DECL_RTL (exp), 0);
6392 if (GET_CODE (addr) == MEM)
6393 addr
6394 = replace_equiv_address (addr,
6395 fix_lexical_addr (XEXP (addr, 0), exp));
6396 else
6397 addr = fix_lexical_addr (addr, exp);
6399 temp = replace_equiv_address (DECL_RTL (exp), addr);
6402 /* This is the case of an array whose size is to be determined
6403 from its initializer, while the initializer is still being parsed.
6404 See expand_decl. */
6406 else if (GET_CODE (DECL_RTL (exp)) == MEM
6407 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6408 temp = validize_mem (DECL_RTL (exp));
6410 /* If DECL_RTL is memory, we are in the normal case and either
6411 the address is not valid or it is not a register and -fforce-addr
6412 is specified, get the address into a register. */
6414 else if (GET_CODE (DECL_RTL (exp)) == MEM
6415 && modifier != EXPAND_CONST_ADDRESS
6416 && modifier != EXPAND_SUM
6417 && modifier != EXPAND_INITIALIZER
6418 && (! memory_address_p (DECL_MODE (exp),
6419 XEXP (DECL_RTL (exp), 0))
6420 || (flag_force_addr
6421 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6422 temp = replace_equiv_address (DECL_RTL (exp),
6423 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6425 /* If we got something, return it. But first, set the alignment
6426 if the address is a register. */
6427 if (temp != 0)
6429 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6430 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6432 return temp;
6435 /* If the mode of DECL_RTL does not match that of the decl, it
6436 must be a promoted value. We return a SUBREG of the wanted mode,
6437 but mark it so that we know that it was already extended. */
6439 if (GET_CODE (DECL_RTL (exp)) == REG
6440 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6442 /* Get the signedness used for this variable. Ensure we get the
6443 same mode we got when the variable was declared. */
6444 if (GET_MODE (DECL_RTL (exp))
6445 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6446 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6447 abort ();
6449 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6450 SUBREG_PROMOTED_VAR_P (temp) = 1;
6451 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6452 return temp;
6455 return DECL_RTL (exp);
6457 case INTEGER_CST:
6458 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6459 TREE_INT_CST_HIGH (exp), mode);
6461 /* ??? If overflow is set, fold will have done an incomplete job,
6462 which can result in (plus xx (const_int 0)), which can get
6463 simplified by validate_replace_rtx during virtual register
6464 instantiation, which can result in unrecognizable insns.
6465 Avoid this by forcing all overflows into registers. */
6466 if (TREE_CONSTANT_OVERFLOW (exp)
6467 && modifier != EXPAND_INITIALIZER)
6468 temp = force_reg (mode, temp);
6470 return temp;
6472 case CONST_DECL:
6473 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6475 case REAL_CST:
6476 /* If optimized, generate immediate CONST_DOUBLE
6477 which will be turned into memory by reload if necessary.
6479 We used to force a register so that loop.c could see it. But
6480 this does not allow gen_* patterns to perform optimizations with
6481 the constants. It also produces two insns in cases like "x = 1.0;".
6482 On most machines, floating-point constants are not permitted in
6483 many insns, so we'd end up copying it to a register in any case.
6485 Now, we do the copying in expand_binop, if appropriate. */
6486 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6487 TYPE_MODE (TREE_TYPE (exp)));
6489 case COMPLEX_CST:
6490 case STRING_CST:
6491 if (! TREE_CST_RTL (exp))
6492 output_constant_def (exp, 1);
6494 /* TREE_CST_RTL probably contains a constant address.
6495 On RISC machines where a constant address isn't valid,
6496 make some insns to get that address into a register. */
6497 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6498 && modifier != EXPAND_CONST_ADDRESS
6499 && modifier != EXPAND_INITIALIZER
6500 && modifier != EXPAND_SUM
6501 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6502 || (flag_force_addr
6503 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6504 return replace_equiv_address (TREE_CST_RTL (exp),
6505 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6506 return TREE_CST_RTL (exp);
6508 case EXPR_WITH_FILE_LOCATION:
6510 rtx to_return;
6511 const char *saved_input_filename = input_filename;
6512 int saved_lineno = lineno;
6513 input_filename = EXPR_WFL_FILENAME (exp);
6514 lineno = EXPR_WFL_LINENO (exp);
6515 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6516 emit_line_note (input_filename, lineno);
6517 /* Possibly avoid switching back and forth here. */
6518 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6519 input_filename = saved_input_filename;
6520 lineno = saved_lineno;
6521 return to_return;
6524 case SAVE_EXPR:
6525 context = decl_function_context (exp);
6527 /* If this SAVE_EXPR was at global context, assume we are an
6528 initialization function and move it into our context. */
6529 if (context == 0)
6530 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6532 /* We treat inline_function_decl as an alias for the current function
6533 because that is the inline function whose vars, types, etc.
6534 are being merged into the current function.
6535 See expand_inline_function. */
6536 if (context == current_function_decl || context == inline_function_decl)
6537 context = 0;
6539 /* If this is non-local, handle it. */
6540 if (context)
6542 /* The following call just exists to abort if the context is
6543 not of a containing function. */
6544 find_function_data (context);
6546 temp = SAVE_EXPR_RTL (exp);
6547 if (temp && GET_CODE (temp) == REG)
6549 put_var_into_stack (exp);
6550 temp = SAVE_EXPR_RTL (exp);
6552 if (temp == 0 || GET_CODE (temp) != MEM)
6553 abort ();
6554 return
6555 replace_equiv_address (temp,
6556 fix_lexical_addr (XEXP (temp, 0), exp));
6558 if (SAVE_EXPR_RTL (exp) == 0)
6560 if (mode == VOIDmode)
6561 temp = const0_rtx;
6562 else
6563 temp = assign_temp (build_qualified_type (type,
6564 (TYPE_QUALS (type)
6565 | TYPE_QUAL_CONST)),
6566 3, 0, 0);
6568 SAVE_EXPR_RTL (exp) = temp;
6569 if (!optimize && GET_CODE (temp) == REG)
6570 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6571 save_expr_regs);
6573 /* If the mode of TEMP does not match that of the expression, it
6574 must be a promoted value. We pass store_expr a SUBREG of the
6575 wanted mode but mark it so that we know that it was already
6576 extended. Note that `unsignedp' was modified above in
6577 this case. */
6579 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6581 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6582 SUBREG_PROMOTED_VAR_P (temp) = 1;
6583 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6586 if (temp == const0_rtx)
6587 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6588 else
6589 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6591 TREE_USED (exp) = 1;
6594 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6595 must be a promoted value. We return a SUBREG of the wanted mode,
6596 but mark it so that we know that it was already extended. */
6598 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6599 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6601 /* Compute the signedness and make the proper SUBREG. */
6602 promote_mode (type, mode, &unsignedp, 0);
6603 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6604 SUBREG_PROMOTED_VAR_P (temp) = 1;
6605 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6606 return temp;
6609 return SAVE_EXPR_RTL (exp);
6611 case UNSAVE_EXPR:
6613 rtx temp;
6614 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6615 TREE_OPERAND (exp, 0)
6616 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6617 return temp;
6620 case PLACEHOLDER_EXPR:
6622 tree old_list = placeholder_list;
6623 tree placeholder_expr = 0;
6625 exp = find_placeholder (exp, &placeholder_expr);
6626 if (exp == 0)
6627 abort ();
6629 placeholder_list = TREE_CHAIN (placeholder_expr);
6630 temp = expand_expr (exp, original_target, tmode, modifier);
6631 placeholder_list = old_list;
6632 return temp;
6635 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6636 abort ();
6638 case WITH_RECORD_EXPR:
6639 /* Put the object on the placeholder list, expand our first operand,
6640 and pop the list. */
6641 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6642 placeholder_list);
6643 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6644 modifier);
6645 placeholder_list = TREE_CHAIN (placeholder_list);
6646 return target;
6648 case GOTO_EXPR:
6649 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6650 expand_goto (TREE_OPERAND (exp, 0));
6651 else
6652 expand_computed_goto (TREE_OPERAND (exp, 0));
6653 return const0_rtx;
6655 case EXIT_EXPR:
6656 expand_exit_loop_if_false (NULL,
6657 invert_truthvalue (TREE_OPERAND (exp, 0)));
6658 return const0_rtx;
6660 case LABELED_BLOCK_EXPR:
6661 if (LABELED_BLOCK_BODY (exp))
6662 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6663 /* Should perhaps use expand_label, but this is simpler and safer. */
6664 do_pending_stack_adjust ();
6665 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6666 return const0_rtx;
6668 case EXIT_BLOCK_EXPR:
6669 if (EXIT_BLOCK_RETURN (exp))
6670 sorry ("returned value in block_exit_expr");
6671 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6672 return const0_rtx;
6674 case LOOP_EXPR:
6675 push_temp_slots ();
6676 expand_start_loop (1);
6677 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6678 expand_end_loop ();
6679 pop_temp_slots ();
6681 return const0_rtx;
6683 case BIND_EXPR:
6685 tree vars = TREE_OPERAND (exp, 0);
6686 int vars_need_expansion = 0;
6688 /* Need to open a binding contour here because
6689 if there are any cleanups they must be contained here. */
6690 expand_start_bindings (2);
6692 /* Mark the corresponding BLOCK for output in its proper place. */
6693 if (TREE_OPERAND (exp, 2) != 0
6694 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6695 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6697 /* If VARS have not yet been expanded, expand them now. */
6698 while (vars)
6700 if (!DECL_RTL_SET_P (vars))
6702 vars_need_expansion = 1;
6703 expand_decl (vars);
6705 expand_decl_init (vars);
6706 vars = TREE_CHAIN (vars);
6709 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6711 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6713 return temp;
6716 case RTL_EXPR:
6717 if (RTL_EXPR_SEQUENCE (exp))
6719 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6720 abort ();
6721 emit_insns (RTL_EXPR_SEQUENCE (exp));
6722 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6724 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6725 free_temps_for_rtl_expr (exp);
6726 return RTL_EXPR_RTL (exp);
6728 case CONSTRUCTOR:
6729 /* If we don't need the result, just ensure we evaluate any
6730 subexpressions. */
6731 if (ignore)
6733 tree elt;
6735 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6736 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6738 return const0_rtx;
6741 /* All elts simple constants => refer to a constant in memory. But
6742 if this is a non-BLKmode mode, let it store a field at a time
6743 since that should make a CONST_INT or CONST_DOUBLE when we
6744 fold. Likewise, if we have a target we can use, it is best to
6745 store directly into the target unless the type is large enough
6746 that memcpy will be used. If we are making an initializer and
6747 all operands are constant, put it in memory as well. */
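/* For example (illustrative only): a TREE_STATIC BLKmode constructor
   that cannot be stored into a safe TARGET is emitted once as static
   data by output_constant_def below, whereas a small non-BLKmode
   constructor falls through to the field-at-a-time store_constructor
   path. */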
6748 else if ((TREE_STATIC (exp)
6749 && ((mode == BLKmode
6750 && ! (target != 0 && safe_from_p (target, exp, 1)))
6751 || TREE_ADDRESSABLE (exp)
6752 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6753 && (! MOVE_BY_PIECES_P
6754 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6755 TYPE_ALIGN (type)))
6756 && ! mostly_zeros_p (exp))))
6757 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6759 rtx constructor = output_constant_def (exp, 1);
6761 if (modifier != EXPAND_CONST_ADDRESS
6762 && modifier != EXPAND_INITIALIZER
6763 && modifier != EXPAND_SUM)
6764 constructor = validize_mem (constructor);
6766 return constructor;
6768 else
6770 /* Handle calls that pass values in multiple non-contiguous
6771 locations. The Irix 6 ABI has examples of this. */
6772 if (target == 0 || ! safe_from_p (target, exp, 1)
6773 || GET_CODE (target) == PARALLEL)
6774 target
6775 = assign_temp (build_qualified_type (type,
6776 (TYPE_QUALS (type)
6777 | (TREE_READONLY (exp)
6778 * TYPE_QUAL_CONST))),
6779 0, TREE_ADDRESSABLE (exp), 1);
6781 store_constructor (exp, target, 0,
6782 int_size_in_bytes (TREE_TYPE (exp)));
6783 return target;
6786 case INDIRECT_REF:
6788 tree exp1 = TREE_OPERAND (exp, 0);
6789 tree index;
6790 tree string = string_constant (exp1, &index);
6792 /* Try to optimize reads from const strings. */
6793 if (string
6794 && TREE_CODE (string) == STRING_CST
6795 && TREE_CODE (index) == INTEGER_CST
6796 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6797 && GET_MODE_CLASS (mode) == MODE_INT
6798 && GET_MODE_SIZE (mode) == 1
6799 && modifier != EXPAND_WRITE)
6800 return gen_int_mode (TREE_STRING_POINTER (string)
6801 [TREE_INT_CST_LOW (index)], mode);
6803 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6804 op0 = memory_address (mode, op0);
6805 temp = gen_rtx_MEM (mode, op0);
6806 set_mem_attributes (temp, exp, 0);
6808 /* If we are writing to this object and its type is a record with
6809 readonly fields, we must mark it as readonly so it will
6810 conflict with readonly references to those fields. */
6811 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6812 RTX_UNCHANGING_P (temp) = 1;
6814 return temp;
6817 case ARRAY_REF:
6818 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6819 abort ();
6822 tree array = TREE_OPERAND (exp, 0);
6823 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6824 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6825 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6826 HOST_WIDE_INT i;
6828 /* Optimize the special-case of a zero lower bound.
6830 We convert the low_bound to sizetype to avoid some problems
6831 with constant folding. (E.g. suppose the lower bound is 1,
6832 and its mode is QI. Without the conversion, (ARRAY
6833 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6834 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6836 if (! integer_zerop (low_bound))
6837 index = size_diffop (index, convert (sizetype, low_bound));
6839 /* Fold an expression like: "foo"[2].
6840 This is not done in fold so it won't happen inside &.
6841 Don't fold if this is for wide characters since it's too
6842 difficult to do correctly and this is a very rare case. */
6844 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6845 && TREE_CODE (array) == STRING_CST
6846 && TREE_CODE (index) == INTEGER_CST
6847 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6848 && GET_MODE_CLASS (mode) == MODE_INT
6849 && GET_MODE_SIZE (mode) == 1)
6850 return gen_int_mode (TREE_STRING_POINTER (array)
6851 [TREE_INT_CST_LOW (index)], mode);
6853 /* If this is a constant index into a constant array,
6854 just get the value from the array. Handle both the cases when
6855 we have an explicit constructor and when our operand is a variable
6856 that was declared const. */
6858 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6859 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6860 && TREE_CODE (index) == INTEGER_CST
6861 && 0 > compare_tree_int (index,
6862 list_length (CONSTRUCTOR_ELTS
6863 (TREE_OPERAND (exp, 0)))))
6865 tree elem;
6867 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6868 i = TREE_INT_CST_LOW (index);
6869 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6872 if (elem)
6873 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6874 modifier);
6877 else if (optimize >= 1
6878 && modifier != EXPAND_CONST_ADDRESS
6879 && modifier != EXPAND_INITIALIZER
6880 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6881 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6882 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6884 if (TREE_CODE (index) == INTEGER_CST)
6886 tree init = DECL_INITIAL (array);
6888 if (TREE_CODE (init) == CONSTRUCTOR)
6890 tree elem;
6892 for (elem = CONSTRUCTOR_ELTS (init);
6893 (elem
6894 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6895 elem = TREE_CHAIN (elem))
6898 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6899 return expand_expr (fold (TREE_VALUE (elem)), target,
6900 tmode, modifier);
6902 else if (TREE_CODE (init) == STRING_CST
6903 && 0 > compare_tree_int (index,
6904 TREE_STRING_LENGTH (init)))
6906 tree type = TREE_TYPE (TREE_TYPE (init));
6907 enum machine_mode mode = TYPE_MODE (type);
6909 if (GET_MODE_CLASS (mode) == MODE_INT
6910 && GET_MODE_SIZE (mode) == 1)
6911 return gen_int_mode (TREE_STRING_POINTER (init)
6912 [TREE_INT_CST_LOW (index)], mode);
6917 /* Fall through. */
6919 case COMPONENT_REF:
6920 case BIT_FIELD_REF:
6921 case ARRAY_RANGE_REF:
6922 /* If the operand is a CONSTRUCTOR, we can just extract the
6923 appropriate field if it is present. Don't do this if we have
6924 already written the data since we want to refer to that copy
6925 and varasm.c assumes that's what we'll do. */
6926 if (code == COMPONENT_REF
6927 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6928 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6930 tree elt;
6932 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6933 elt = TREE_CHAIN (elt))
6934 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6935 /* We can normally use the value of the field in the
6936 CONSTRUCTOR. However, if this is a bitfield in
6937 an integral mode that we can fit in a HOST_WIDE_INT,
6938 we must mask only the number of bits in the bitfield,
6939 since this is done implicitly by the constructor. If
6940 the bitfield does not meet either of those conditions,
6941 we can't do this optimization. */
6942 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6943 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6944 == MODE_INT)
6945 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6946 <= HOST_BITS_PER_WIDE_INT))))
6948 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6949 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6951 HOST_WIDE_INT bitsize
6952 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6953 enum machine_mode imode
6954 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6956 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6958 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6959 op0 = expand_and (imode, op0, op1, target);
6961 else
6963 tree count
6964 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6965 0);
6967 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6968 target, 0);
6969 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6970 target, 0);
6974 return op0;
6979 enum machine_mode mode1;
6980 HOST_WIDE_INT bitsize, bitpos;
6981 tree offset;
6982 int volatilep = 0;
6983 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6984 &mode1, &unsignedp, &volatilep);
6985 rtx orig_op0;
6987 /* If we got back the original object, something is wrong. Perhaps
6988 we are evaluating an expression too early. In any event, don't
6989 infinitely recurse. */
6990 if (tem == exp)
6991 abort ();
6993 /* If TEM's type is a union of variable size, pass TARGET to the inner
6994 computation, since it will need a temporary and TARGET is known
6995 to be usable for that. This occurs in unchecked conversion in Ada. */
6997 orig_op0 = op0
6998 = expand_expr (tem,
6999 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7000 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7001 != INTEGER_CST)
7002 ? target : NULL_RTX),
7003 VOIDmode,
7004 (modifier == EXPAND_INITIALIZER
7005 || modifier == EXPAND_CONST_ADDRESS)
7006 ? modifier : EXPAND_NORMAL);
7008 /* If this is a constant, put it into a register if it is a
7009 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7010 if (CONSTANT_P (op0))
7012 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7013 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7014 && offset == 0)
7015 op0 = force_reg (mode, op0);
7016 else
7017 op0 = validize_mem (force_const_mem (mode, op0));
7020 if (offset != 0)
7022 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7024 /* If this object is in a register, put it into memory.
7025 This case can't occur in C, but can in Ada if we have
7026 unchecked conversion of an expression from a scalar type to
7027 an array or record type. */
7028 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7029 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7031 /* If the operand is a SAVE_EXPR, we can deal with this by
7032 forcing the SAVE_EXPR into memory. */
7033 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7035 put_var_into_stack (TREE_OPERAND (exp, 0));
7036 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7038 else
7040 tree nt
7041 = build_qualified_type (TREE_TYPE (tem),
7042 (TYPE_QUALS (TREE_TYPE (tem))
7043 | TYPE_QUAL_CONST));
7044 rtx memloc = assign_temp (nt, 1, 1, 1);
7046 emit_move_insn (memloc, op0);
7047 op0 = memloc;
7051 if (GET_CODE (op0) != MEM)
7052 abort ();
7054 #ifdef POINTERS_EXTEND_UNSIGNED
7055 if (GET_MODE (offset_rtx) != Pmode)
7056 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7057 #else
7058 if (GET_MODE (offset_rtx) != ptr_mode)
7059 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7060 #endif
7062 /* A constant address in OP0 can have VOIDmode; we must not try
7063 to call force_reg for that case, so avoid it here. */
7064 if (GET_CODE (op0) == MEM
7065 && GET_MODE (op0) == BLKmode
7066 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7067 && bitsize != 0
7068 && (bitpos % bitsize) == 0
7069 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7070 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7072 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7073 bitpos = 0;
7076 op0 = offset_address (op0, offset_rtx,
7077 highest_pow2_factor (offset));
7080 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7081 record its alignment as BIGGEST_ALIGNMENT. */
7082 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7083 && is_aligning_offset (offset, tem))
7084 set_mem_align (op0, BIGGEST_ALIGNMENT);
7086 /* Don't forget about volatility even if this is a bitfield. */
7087 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7089 if (op0 == orig_op0)
7090 op0 = copy_rtx (op0);
7092 MEM_VOLATILE_P (op0) = 1;
7095 /* The following code doesn't handle CONCAT.
7096 Assume only bitpos == 0 can be used for CONCAT, due to
7097 one-element arrays having the same mode as their element. */
7098 if (GET_CODE (op0) == CONCAT)
7100 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7101 abort ();
7102 return op0;
7105 /* In cases where an aligned union has an unaligned object
7106 as a field, we might be extracting a BLKmode value from
7107 an integer-mode (e.g., SImode) object. Handle this case
7108 by doing the extract into an object as wide as the field
7109 (which we know to be the width of a basic mode), then
7110 storing into memory, and changing the mode to BLKmode. */
7111 if (mode1 == VOIDmode
7112 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7113 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7114 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7115 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7116 && modifier != EXPAND_CONST_ADDRESS
7117 && modifier != EXPAND_INITIALIZER)
7118 /* If the field isn't aligned enough to fetch as a memref,
7119 fetch it as a bit field. */
7120 || (mode1 != BLKmode
7121 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7122 && ((TYPE_ALIGN (TREE_TYPE (tem))
7123 < GET_MODE_ALIGNMENT (mode))
7124 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7125 /* If the type and the field are a constant size and the
7126 size of the type isn't the same size as the bitfield,
7127 we must use bitfield operations. */
7128 || (bitsize >= 0
7129 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7130 == INTEGER_CST)
7131 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7132 bitsize)))
7134 enum machine_mode ext_mode = mode;
7136 if (ext_mode == BLKmode
7137 && ! (target != 0 && GET_CODE (op0) == MEM
7138 && GET_CODE (target) == MEM
7139 && bitpos % BITS_PER_UNIT == 0))
7140 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7142 if (ext_mode == BLKmode)
7144 /* In this case, BITPOS must start at a byte boundary and
7145 TARGET, if specified, must be a MEM. */
7146 if (GET_CODE (op0) != MEM
7147 || (target != 0 && GET_CODE (target) != MEM)
7148 || bitpos % BITS_PER_UNIT != 0)
7149 abort ();
7151 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7152 if (target == 0)
7153 target = assign_temp (type, 0, 1, 1);
7155 emit_block_move (target, op0,
7156 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7157 / BITS_PER_UNIT));
7159 return target;
7162 op0 = validize_mem (op0);
7164 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7165 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7167 op0 = extract_bit_field (op0, bitsize, bitpos,
7168 unsignedp, target, ext_mode, ext_mode,
7169 int_size_in_bytes (TREE_TYPE (tem)));
7171 /* If the result is a record type and BITSIZE is narrower than
7172 the mode of OP0, an integral mode, and this is a big endian
7173 machine, we must put the field into the high-order bits. */
7174 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7175 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7176 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7177 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7178 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7179 - bitsize),
7180 op0, 1);
7182 if (mode == BLKmode)
7184 rtx new = assign_temp (build_qualified_type
7185 ((*lang_hooks.types.type_for_mode)
7186 (ext_mode, 0),
7187 TYPE_QUAL_CONST), 0, 1, 1);
7189 emit_move_insn (new, op0);
7190 op0 = copy_rtx (new);
7191 PUT_MODE (op0, BLKmode);
7192 set_mem_attributes (op0, exp, 1);
7195 return op0;
7198 /* If the result is BLKmode, use that to access the object
7199 now as well. */
7200 if (mode == BLKmode)
7201 mode1 = BLKmode;
7203 /* Get a reference to just this component. */
7204 if (modifier == EXPAND_CONST_ADDRESS
7205 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7206 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7207 else
7208 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7210 if (op0 == orig_op0)
7211 op0 = copy_rtx (op0);
7213 set_mem_attributes (op0, exp, 0);
7214 if (GET_CODE (XEXP (op0, 0)) == REG)
7215 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7217 MEM_VOLATILE_P (op0) |= volatilep;
7218 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7219 || modifier == EXPAND_CONST_ADDRESS
7220 || modifier == EXPAND_INITIALIZER)
7221 return op0;
7222 else if (target == 0)
7223 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7225 convert_move (target, op0, unsignedp);
7226 return target;
7229 case VTABLE_REF:
7231 rtx insn, before = get_last_insn (), vtbl_ref;
7233 /* Evaluate the interior expression. */
7234 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7235 tmode, modifier);
7237 /* Get or create an instruction off which to hang a note. */
7238 if (REG_P (subtarget))
7240 target = subtarget;
7241 insn = get_last_insn ();
7242 if (insn == before)
7243 abort ();
7244 if (! INSN_P (insn))
7245 insn = prev_nonnote_insn (insn);
7247 else
7249 target = gen_reg_rtx (GET_MODE (subtarget));
7250 insn = emit_move_insn (target, subtarget);
7253 /* Collect the data for the note. */
7254 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7255 vtbl_ref = plus_constant (vtbl_ref,
7256 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7257 /* Discard the initial CONST that was added. */
7258 vtbl_ref = XEXP (vtbl_ref, 0);
7260 REG_NOTES (insn)
7261 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7263 return target;
7266 /* Intended for a reference to a buffer of a file-object in Pascal.
7267 But it's not certain that a special tree code will really be
7268 necessary for these. INDIRECT_REF might work for them. */
7269 case BUFFER_REF:
7270 abort ();
7272 case IN_EXPR:
7274 /* Pascal set IN expression.
7276 Algorithm:
7277 rlo = set_low - (set_low%bits_per_word);
7278 the_word = set [ (index - rlo)/bits_per_word ];
7279 bit_index = index % bits_per_word;
7280 bitmask = 1 << bit_index;
7281 return !!(the_word & bitmask); */
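/* Worked example of the algorithm above (illustrative only): with
   bits_per_word == 32, set_low == 0 and index == 70 we get rlo = 0,
   the_word = set[2], bit_index = 6 and bitmask = 0x40, i.e. the result
   tests bit 6 of the third word of the set. */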
7283 tree set = TREE_OPERAND (exp, 0);
7284 tree index = TREE_OPERAND (exp, 1);
7285 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7286 tree set_type = TREE_TYPE (set);
7287 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7288 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7289 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7290 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7291 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7292 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7293 rtx setaddr = XEXP (setval, 0);
7294 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7295 rtx rlow;
7296 rtx diff, quo, rem, addr, bit, result;
7298 /* If domain is empty, answer is no. Likewise if index is constant
7299 and out of bounds. */
7300 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7301 && TREE_CODE (set_low_bound) == INTEGER_CST
7302 && tree_int_cst_lt (set_high_bound, set_low_bound))
7303 || (TREE_CODE (index) == INTEGER_CST
7304 && TREE_CODE (set_low_bound) == INTEGER_CST
7305 && tree_int_cst_lt (index, set_low_bound))
7306 || (TREE_CODE (set_high_bound) == INTEGER_CST
7307 && TREE_CODE (index) == INTEGER_CST
7308 && tree_int_cst_lt (set_high_bound, index))))
7309 return const0_rtx;
7311 if (target == 0)
7312 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7314 /* If we get here, we have to generate the code for both cases
7315 (in range and out of range). */
7317 op0 = gen_label_rtx ();
7318 op1 = gen_label_rtx ();
7320 if (! (GET_CODE (index_val) == CONST_INT
7321 && GET_CODE (lo_r) == CONST_INT))
7322 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7323 GET_MODE (index_val), iunsignedp, op1);
7325 if (! (GET_CODE (index_val) == CONST_INT
7326 && GET_CODE (hi_r) == CONST_INT))
7327 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7328 GET_MODE (index_val), iunsignedp, op1);
7330 /* Calculate the element number of bit zero in the first word
7331 of the set. */
7332 if (GET_CODE (lo_r) == CONST_INT)
7333 rlow = GEN_INT (INTVAL (lo_r)
7334 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7335 else
7336 rlow = expand_binop (index_mode, and_optab, lo_r,
7337 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7338 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7340 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7341 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7343 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7344 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7345 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7346 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7348 addr = memory_address (byte_mode,
7349 expand_binop (index_mode, add_optab, diff,
7350 setaddr, NULL_RTX, iunsignedp,
7351 OPTAB_LIB_WIDEN));
7353 /* Extract the bit we want to examine. */
7354 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7355 gen_rtx_MEM (byte_mode, addr),
7356 make_tree (TREE_TYPE (index), rem),
7357 NULL_RTX, 1);
7358 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7359 GET_MODE (target) == byte_mode ? target : 0,
7360 1, OPTAB_LIB_WIDEN);
7362 if (result != target)
7363 convert_move (target, result, 1);
7365 /* Output the code to handle the out-of-range case. */
7366 emit_jump (op0);
7367 emit_label (op1);
7368 emit_move_insn (target, const0_rtx);
7369 emit_label (op0);
7370 return target;
7373 case WITH_CLEANUP_EXPR:
7374 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7376 WITH_CLEANUP_EXPR_RTL (exp)
7377 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7378 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7379 CLEANUP_EH_ONLY (exp));
7381 /* That's it for this cleanup. */
7382 TREE_OPERAND (exp, 1) = 0;
7384 return WITH_CLEANUP_EXPR_RTL (exp);
7386 case CLEANUP_POINT_EXPR:
7388 /* Start a new binding layer that will keep track of all cleanup
7389 actions to be performed. */
7390 expand_start_bindings (2);
7392 target_temp_slot_level = temp_slot_level;
7394 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7395 /* If we're going to use this value, load it up now. */
7396 if (! ignore)
7397 op0 = force_not_mem (op0);
7398 preserve_temp_slots (op0);
7399 expand_end_bindings (NULL_TREE, 0, 0);
7401 return op0;
7403 case CALL_EXPR:
7404 /* Check for a built-in function. */
7405 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7406 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7407 == FUNCTION_DECL)
7408 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7410 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7411 == BUILT_IN_FRONTEND)
7412 return (*lang_hooks.expand_expr)
7413 (exp, original_target, tmode, modifier);
7414 else
7415 return expand_builtin (exp, target, subtarget, tmode, ignore);
7418 return expand_call (exp, target, ignore);
7420 case NON_LVALUE_EXPR:
7421 case NOP_EXPR:
7422 case CONVERT_EXPR:
7423 case REFERENCE_EXPR:
7424 if (TREE_OPERAND (exp, 0) == error_mark_node)
7425 return const0_rtx;
7427 if (TREE_CODE (type) == UNION_TYPE)
7429 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7431 /* If both input and output are BLKmode, this conversion isn't doing
7432 anything except possibly changing memory attribute. */
7433 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7435 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7436 modifier);
7438 result = copy_rtx (result);
7439 set_mem_attributes (result, exp, 0);
7440 return result;
7443 if (target == 0)
7444 target = assign_temp (type, 0, 1, 1);
7446 if (GET_CODE (target) == MEM)
7447 /* Store data into beginning of memory target. */
7448 store_expr (TREE_OPERAND (exp, 0),
7449 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7451 else if (GET_CODE (target) == REG)
7452 /* Store this field into a union of the proper type. */
7453 store_field (target,
7454 MIN ((int_size_in_bytes (TREE_TYPE
7455 (TREE_OPERAND (exp, 0)))
7456 * BITS_PER_UNIT),
7457 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7458 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7459 VOIDmode, 0, type, 0);
7460 else
7461 abort ();
7463 /* Return the entire union. */
7464 return target;
7467 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7469 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7470 modifier);
7472 /* If the signedness of the conversion differs and OP0 is
7473 a promoted SUBREG, clear that indication since we now
7474 have to do the proper extension. */
7475 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7476 && GET_CODE (op0) == SUBREG)
7477 SUBREG_PROMOTED_VAR_P (op0) = 0;
7479 return op0;
7482 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7483 if (GET_MODE (op0) == mode)
7484 return op0;
7486 /* If OP0 is a constant, just convert it into the proper mode. */
7487 if (CONSTANT_P (op0))
7489 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7490 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7492 if (modifier == EXPAND_INITIALIZER)
7493 return simplify_gen_subreg (mode, op0, inner_mode,
7494 subreg_lowpart_offset (mode,
7495 inner_mode));
7496 else
7497 return convert_modes (mode, inner_mode, op0,
7498 TREE_UNSIGNED (inner_type));
7501 if (modifier == EXPAND_INITIALIZER)
7502 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7504 if (target == 0)
7505 return
7506 convert_to_mode (mode, op0,
7507 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7508 else
7509 convert_move (target, op0,
7510 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7511 return target;
7513 case VIEW_CONVERT_EXPR:
7514 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7516 /* If the input and output modes are both the same, we are done.
7517 Otherwise, if neither mode is BLKmode and both are within a word, we
7518 can use gen_lowpart. If neither is true, make sure the operand is
7519 in memory and convert the MEM to the new mode. */
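/* Illustrative note, not in the original source: a VIEW_CONVERT_EXPR
   between a 32-bit float and a 32-bit integer can take the gen_lowpart
   path, while a conversion between two BLKmode aggregates has to go
   through memory.  */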
7520 if (TYPE_MODE (type) == GET_MODE (op0))
7522 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7523 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7524 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7525 op0 = gen_lowpart (TYPE_MODE (type), op0);
7526 else if (GET_CODE (op0) != MEM)
7528 /* If the operand is not a MEM, force it into memory. Since we
7529 are going to be changing the mode of the MEM, don't call
7530 force_const_mem for constants because we don't allow pool
7531 constants to change mode. */
7532 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7534 if (TREE_ADDRESSABLE (exp))
7535 abort ();
7537 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7538 target
7539 = assign_stack_temp_for_type
7540 (TYPE_MODE (inner_type),
7541 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7543 emit_move_insn (target, op0);
7544 op0 = target;
7547 /* At this point, OP0 is in the correct mode. If the output type is such
7548 that the operand is known to be aligned, indicate that it is.
7549 Otherwise, we need only be concerned about alignment for non-BLKmode
7550 results. */
7551 if (GET_CODE (op0) == MEM)
7553 op0 = copy_rtx (op0);
7555 if (TYPE_ALIGN_OK (type))
7556 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7557 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7558 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7560 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7561 HOST_WIDE_INT temp_size
7562 = MAX (int_size_in_bytes (inner_type),
7563 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7564 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7565 temp_size, 0, type);
7566 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7568 if (TREE_ADDRESSABLE (exp))
7569 abort ();
7571 if (GET_MODE (op0) == BLKmode)
7572 emit_block_move (new_with_op0_mode, op0,
7573 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7574 else
7575 emit_move_insn (new_with_op0_mode, op0);
7577 op0 = new;
7580 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7583 return op0;
7585 case PLUS_EXPR:
7586 /* We come here from MINUS_EXPR when the second operand is a
7587 constant. */
7588 plus_expr:
7589 this_optab = ! unsignedp && flag_trapv
7590 && (GET_MODE_CLASS (mode) == MODE_INT)
7591 ? addv_optab : add_optab;
7593 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7594 something else, make sure we add the register to the constant and
7595 then to the other thing. This case can occur during strength
7596 reduction and doing it this way will produce better code if the
7597 frame pointer or argument pointer is eliminated.
7599 fold-const.c will ensure that the constant is always in the inner
7600 PLUS_EXPR, so the only case we need to do anything about is if
7601 sp, ap, or fp is our second argument, in which case we must swap
7602 the innermost first argument and our second argument. */
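/* Illustrative sketch, not part of the original source: a tree of the
   form (PLUS (PLUS reg C) fp) is rewritten by the swap below into
   (PLUS (PLUS fp C) reg), so the frame or argument pointer is combined
   with the constant first and can be folded into a single offset.  */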
7604 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7605 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7606 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7607 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7608 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7609 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7611 tree t = TREE_OPERAND (exp, 1);
7613 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7614 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7617 /* If the result is to be ptr_mode and we are adding an integer to
7618 something, we might be forming a constant. So try to use
7619 plus_constant. If it produces a sum and we can't accept it,
7620 use force_operand. This allows P = &ARR[const] to generate
7621 efficient code on machines where a SYMBOL_REF is not a valid
7622 address.
7624 If this is an EXPAND_SUM call, always return the sum. */
7625 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7626 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7628 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7629 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7630 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7632 rtx constant_part;
7634 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7635 EXPAND_SUM);
7636 /* Use immed_double_const to ensure that the constant is
7637 truncated according to the mode of OP1, then sign extended
7638 to a HOST_WIDE_INT. Using the constant directly can result
7639 in non-canonical RTL in a 64x32 cross compile. */
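/* Illustration, not in the original source: on a 64-bit host compiling
   for a 32-bit target, an SImode constant with bit 31 set (e.g.
   0xffffffff) must appear as the sign-extended HOST_WIDE_INT -1;
   immed_double_const performs exactly that truncate-then-sign-extend
   step.  */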
7640 constant_part
7641 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7642 (HOST_WIDE_INT) 0,
7643 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7644 op1 = plus_constant (op1, INTVAL (constant_part));
7645 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7646 op1 = force_operand (op1, target);
7647 return op1;
7650 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7651 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7652 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7654 rtx constant_part;
7656 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7657 (modifier == EXPAND_INITIALIZER
7658 ? EXPAND_INITIALIZER : EXPAND_SUM));
7659 if (! CONSTANT_P (op0))
7661 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7662 VOIDmode, modifier);
7663 /* Don't go to both_summands if modifier
7664 says it's not right to return a PLUS. */
7665 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7666 goto binop2;
7667 goto both_summands;
7669 /* Use immed_double_const to ensure that the constant is
7670 truncated according to the mode of OP0, then sign extended
7671 to a HOST_WIDE_INT. Using the constant directly can result
7672 in non-canonical RTL in a 64x32 cross compile. */
7673 constant_part
7674 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7675 (HOST_WIDE_INT) 0,
7676 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7677 op0 = plus_constant (op0, INTVAL (constant_part));
7678 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7679 op0 = force_operand (op0, target);
7680 return op0;
7684 /* No sense saving up arithmetic to be done
7685 if it's all in the wrong mode to form part of an address.
7686 And force_operand won't know whether to sign-extend or
7687 zero-extend. */
7688 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7689 || mode != ptr_mode)
7690 goto binop;
7692 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7693 subtarget = 0;
7695 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7696 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7698 both_summands:
7699 /* Make sure any term that's a sum with a constant comes last. */
7700 if (GET_CODE (op0) == PLUS
7701 && CONSTANT_P (XEXP (op0, 1)))
7703 temp = op0;
7704 op0 = op1;
7705 op1 = temp;
7707 /* If adding to a sum including a constant,
7708 associate it to put the constant outside. */
7709 if (GET_CODE (op1) == PLUS
7710 && CONSTANT_P (XEXP (op1, 1)))
7712 rtx constant_term = const0_rtx;
7714 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7715 if (temp != 0)
7716 op0 = temp;
7717 /* Ensure that MULT comes first if there is one. */
7718 else if (GET_CODE (op0) == MULT)
7719 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7720 else
7721 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7723 /* Let's also eliminate constants from op0 if possible. */
7724 op0 = eliminate_constant_term (op0, &constant_term);
7726 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7727 their sum should be a constant. Form it into OP1, since the
7728 result we want will then be OP0 + OP1. */
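/* Worked illustration of this reassociation, not in the original
   source: if OP0 is (mult r1 4) and OP1 is (plus r2 (const_int 12)),
   OP0 becomes (plus (mult r1 4) r2), OP1 becomes (const_int 12), and
   the final sum is (plus (plus (mult r1 4) r2) (const_int 12)).  */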
7730 temp = simplify_binary_operation (PLUS, mode, constant_term,
7731 XEXP (op1, 1));
7732 if (temp != 0)
7733 op1 = temp;
7734 else
7735 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7738 /* Put a constant term last and put a multiplication first. */
7739 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7740 temp = op1, op1 = op0, op0 = temp;
7742 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7743 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7745 case MINUS_EXPR:
7746 /* For initializers, we are allowed to return a MINUS of two
7747 symbolic constants. Here we handle all cases when both operands
7748 are constant. */
7749 /* Handle difference of two symbolic constants,
7750 for the sake of an initializer. */
7751 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7752 && really_constant_p (TREE_OPERAND (exp, 0))
7753 && really_constant_p (TREE_OPERAND (exp, 1)))
7755 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7756 modifier);
7757 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7758 modifier);
7760 /* If the last operand is a CONST_INT, use plus_constant of
7761 the negated constant. Else make the MINUS. */
7762 if (GET_CODE (op1) == CONST_INT)
7763 return plus_constant (op0, - INTVAL (op1));
7764 else
7765 return gen_rtx_MINUS (mode, op0, op1);
7767 /* Convert A - const to A + (-const). */
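/* For example (illustrative): `x - 8' in a signed type is rebuilt as
   `x + (-8)' and re-dispatched through the plus_expr label above.  */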
7768 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7770 tree negated = fold (build1 (NEGATE_EXPR, type,
7771 TREE_OPERAND (exp, 1)));
7773 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7774 /* If we can't negate the constant in TYPE, leave it alone and
7775 expand_binop will negate it for us. We used to try to do it
7776 here in the signed version of TYPE, but that doesn't work
7777 on POINTER_TYPEs. */;
7778 else
7780 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7781 goto plus_expr;
7784 this_optab = ! unsignedp && flag_trapv
7785 && (GET_MODE_CLASS(mode) == MODE_INT)
7786 ? subv_optab : sub_optab;
7787 goto binop;
7789 case MULT_EXPR:
7790 /* If first operand is constant, swap them.
7791 Thus the following special case checks need only
7792 check the second operand. */
7793 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7795 tree t1 = TREE_OPERAND (exp, 0);
7796 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7797 TREE_OPERAND (exp, 1) = t1;
7800 /* Attempt to return something suitable for generating an
7801 indexed address, for machines that support that. */
7803 if (modifier == EXPAND_SUM && mode == ptr_mode
7804 && host_integerp (TREE_OPERAND (exp, 1), 0))
7806 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7807 EXPAND_SUM);
7809 /* If we knew for certain that this is arithmetic for an array
7810 reference, and we knew the bounds of the array, then we could
7811 apply the distributive law across (PLUS X C) for constant C.
7812 Without such knowledge, we risk overflowing the computation
7813 when both X and C are large, but X+C isn't. */
7814 /* ??? Could perhaps special-case EXP being unsigned and C being
7815 positive. In that case we are certain that X+C is no smaller
7816 than X and so the transformed expression will overflow iff the
7817 original would have. */
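/* Illustration of the hazard, not in the original source: with 32-bit
   ints, X = 0x40000000 and C = -0x40000000 give X + C = 0, so
   (X + C) * 4 is 0, yet distributing would compute X * 4, which
   already wraps around.  */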
7819 if (GET_CODE (op0) != REG)
7820 op0 = force_operand (op0, NULL_RTX);
7821 if (GET_CODE (op0) != REG)
7822 op0 = copy_to_mode_reg (mode, op0);
7824 return
7825 gen_rtx_MULT (mode, op0,
7826 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7829 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7830 subtarget = 0;
7832 /* Check for multiplying things that have been extended
7833 from a narrower type. If this machine supports multiplying
7834 in that narrower type with a result in the desired type,
7835 do it that way, and avoid the explicit type-conversion. */
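/* For example (illustrative only): with `short a, b;', the expression
   `(int) a * (int) b' can use a single widening HImode-to-SImode
   multiply pattern instead of extending both operands and doing a
   full SImode multiply.  */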
7836 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7837 && TREE_CODE (type) == INTEGER_TYPE
7838 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7839 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7840 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7841 && int_fits_type_p (TREE_OPERAND (exp, 1),
7842 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7843 /* Don't use a widening multiply if a shift will do. */
7844 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7845 > HOST_BITS_PER_WIDE_INT)
7846 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7847 ||
7848 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7849 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7850 ==
7851 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7852 /* If both operands are extended, they must either both
7853 be zero-extended or both be sign-extended. */
7854 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7855 ==
7856 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7858 enum machine_mode innermode
7859 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7860 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7861 ? smul_widen_optab : umul_widen_optab);
7862 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7863 ? umul_widen_optab : smul_widen_optab);
7864 if (mode == GET_MODE_WIDER_MODE (innermode))
7866 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7868 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7869 NULL_RTX, VOIDmode, 0);
7870 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7871 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7872 VOIDmode, 0);
7873 else
7874 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7875 NULL_RTX, VOIDmode, 0);
7876 goto binop2;
7878 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7879 && innermode == word_mode)
7881 rtx htem;
7882 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7883 NULL_RTX, VOIDmode, 0);
7884 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7885 op1 = convert_modes (innermode, mode,
7886 expand_expr (TREE_OPERAND (exp, 1),
7887 NULL_RTX, VOIDmode, 0),
7888 unsignedp);
7889 else
7890 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7891 NULL_RTX, VOIDmode, 0);
7892 temp = expand_binop (mode, other_optab, op0, op1, target,
7893 unsignedp, OPTAB_LIB_WIDEN);
7894 htem = expand_mult_highpart_adjust (innermode,
7895 gen_highpart (innermode, temp),
7896 op0, op1,
7897 gen_highpart (innermode, temp),
7898 unsignedp);
7899 emit_move_insn (gen_highpart (innermode, temp), htem);
7900 return temp;
7904 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7905 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7906 return expand_mult (mode, op0, op1, target, unsignedp);
7908 case TRUNC_DIV_EXPR:
7909 case FLOOR_DIV_EXPR:
7910 case CEIL_DIV_EXPR:
7911 case ROUND_DIV_EXPR:
7912 case EXACT_DIV_EXPR:
7913 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7914 subtarget = 0;
7915 /* Possible optimization: compute the dividend with EXPAND_SUM
7916 then if the divisor is constant can optimize the case
7917 where some terms of the dividend have coeffs divisible by it. */
7918 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7919 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7920 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7922 case RDIV_EXPR:
7923 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7924 saving an expensive divide. If not, combine will rebuild the
7925 original computation. */
7926 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7927 && TREE_CODE (type) == REAL_TYPE
7928 && !real_onep (TREE_OPERAND (exp, 0)))
7929 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7930 build (RDIV_EXPR, type,
7931 build_real (type, dconst1),
7932 TREE_OPERAND (exp, 1))),
7933 target, tmode, unsignedp);
7934 this_optab = sdiv_optab;
7935 goto binop;
7937 case TRUNC_MOD_EXPR:
7938 case FLOOR_MOD_EXPR:
7939 case CEIL_MOD_EXPR:
7940 case ROUND_MOD_EXPR:
7941 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7942 subtarget = 0;
7943 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7944 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7945 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7947 case FIX_ROUND_EXPR:
7948 case FIX_FLOOR_EXPR:
7949 case FIX_CEIL_EXPR:
7950 abort (); /* Not used for C. */
7952 case FIX_TRUNC_EXPR:
7953 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7954 if (target == 0)
7955 target = gen_reg_rtx (mode);
7956 expand_fix (target, op0, unsignedp);
7957 return target;
7959 case FLOAT_EXPR:
7960 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7961 if (target == 0)
7962 target = gen_reg_rtx (mode);
7963 /* expand_float can't figure out what to do if FROM has VOIDmode.
7964 So give it the correct mode. With -O, cse will optimize this. */
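/* Illustrative note, not in the original: a CONST_INT such as the 5 in
   `(double) 5' has VOIDmode, which is why the copy into a register of
   the operand's type mode is needed below.  */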
7965 if (GET_MODE (op0) == VOIDmode)
7966 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7967 op0);
7968 expand_float (target, op0,
7969 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7970 return target;
7972 case NEGATE_EXPR:
7973 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7974 temp = expand_unop (mode,
7975 ! unsignedp && flag_trapv
7976 && (GET_MODE_CLASS(mode) == MODE_INT)
7977 ? negv_optab : neg_optab, op0, target, 0);
7978 if (temp == 0)
7979 abort ();
7980 return temp;
7982 case ABS_EXPR:
7983 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7985 /* Handle complex values specially. */
7986 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7987 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7988 return expand_complex_abs (mode, op0, target, unsignedp);
7990 /* Unsigned abs is simply the operand. Testing here means we don't
7991 risk generating incorrect code below. */
7992 if (TREE_UNSIGNED (type))
7993 return op0;
7995 return expand_abs (mode, op0, target, unsignedp,
7996 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7998 case MAX_EXPR:
7999 case MIN_EXPR:
8000 target = original_target;
8001 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8002 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8003 || GET_MODE (target) != mode
8004 || (GET_CODE (target) == REG
8005 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8006 target = gen_reg_rtx (mode);
8007 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8008 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8010 /* First try to do it with a special MIN or MAX instruction.
8011 If that does not win, use a conditional jump to select the proper
8012 value. */
8013 this_optab = (TREE_UNSIGNED (type)
8014 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8015 : (code == MIN_EXPR ? smin_optab : smax_optab));
8017 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8018 OPTAB_WIDEN);
8019 if (temp != 0)
8020 return temp;
8022 /* At this point, a MEM target is no longer useful; we will get better
8023 code without it. */
8025 if (GET_CODE (target) == MEM)
8026 target = gen_reg_rtx (mode);
8028 if (target != op0)
8029 emit_move_insn (target, op0);
8031 op0 = gen_label_rtx ();
8033 /* If this mode is an integer too wide to compare properly,
8034 compare word by word. Rely on cse to optimize constant cases. */
8035 if (GET_MODE_CLASS (mode) == MODE_INT
8036 && ! can_compare_p (GE, mode, ccp_jump))
8038 if (code == MAX_EXPR)
8039 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8040 target, op1, NULL_RTX, op0);
8041 else
8042 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8043 op1, target, NULL_RTX, op0);
8045 else
8047 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8048 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8049 unsignedp, mode, NULL_RTX, NULL_RTX,
8050 op0);
8052 emit_move_insn (target, op1);
8053 emit_label (op0);
8054 return target;
8056 case BIT_NOT_EXPR:
8057 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8058 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8059 if (temp == 0)
8060 abort ();
8061 return temp;
8063 case FFS_EXPR:
8064 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8065 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8066 if (temp == 0)
8067 abort ();
8068 return temp;
8070 /* ??? Can optimize bitwise operations with one arg constant.
8071 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8072 and (a bitwise1 b) bitwise2 b (etc)
8073 but that is probably not worthwhile. */
8075 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8076 boolean values when we want in all cases to compute both of them. In
8077 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8078 as actual zero-or-1 values and then bitwise anding. In cases where
8079 there cannot be any side effects, better code would be made by
8080 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8081 how to recognize those cases. */
8083 case TRUTH_AND_EXPR:
8084 case BIT_AND_EXPR:
8085 this_optab = and_optab;
8086 goto binop;
8088 case TRUTH_OR_EXPR:
8089 case BIT_IOR_EXPR:
8090 this_optab = ior_optab;
8091 goto binop;
8093 case TRUTH_XOR_EXPR:
8094 case BIT_XOR_EXPR:
8095 this_optab = xor_optab;
8096 goto binop;
8098 case LSHIFT_EXPR:
8099 case RSHIFT_EXPR:
8100 case LROTATE_EXPR:
8101 case RROTATE_EXPR:
8102 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8103 subtarget = 0;
8104 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8105 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8106 unsignedp);
8108 /* Could determine the answer when only additive constants differ. Also,
8109 the addition of one can be handled by changing the condition. */
8110 case LT_EXPR:
8111 case LE_EXPR:
8112 case GT_EXPR:
8113 case GE_EXPR:
8114 case EQ_EXPR:
8115 case NE_EXPR:
8116 case UNORDERED_EXPR:
8117 case ORDERED_EXPR:
8118 case UNLT_EXPR:
8119 case UNLE_EXPR:
8120 case UNGT_EXPR:
8121 case UNGE_EXPR:
8122 case UNEQ_EXPR:
8123 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8124 if (temp != 0)
8125 return temp;
8127 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8128 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8129 && original_target
8130 && GET_CODE (original_target) == REG
8131 && (GET_MODE (original_target)
8132 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8134 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8135 VOIDmode, 0);
8137 /* If temp is constant, we can just compute the result. */
8138 if (GET_CODE (temp) == CONST_INT)
8140 if (INTVAL (temp) != 0)
8141 emit_move_insn (target, const1_rtx);
8142 else
8143 emit_move_insn (target, const0_rtx);
8145 return target;
8148 if (temp != original_target)
8150 enum machine_mode mode1 = GET_MODE (temp);
8151 if (mode1 == VOIDmode)
8152 mode1 = tmode != VOIDmode ? tmode : mode;
8154 temp = copy_to_mode_reg (mode1, temp);
8157 op1 = gen_label_rtx ();
8158 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8159 GET_MODE (temp), unsignedp, op1);
8160 emit_move_insn (temp, const1_rtx);
8161 emit_label (op1);
8162 return temp;
8165 /* If no set-flag instruction, must generate a conditional
8166 store into a temporary variable. Drop through
8167 and handle this like && and ||. */
8169 case TRUTH_ANDIF_EXPR:
8170 case TRUTH_ORIF_EXPR:
8171 if (! ignore
8172 && (target == 0 || ! safe_from_p (target, exp, 1)
8173 /* Make sure we don't have a hard reg (such as function's return
8174 value) live across basic blocks, if not optimizing. */
8175 || (!optimize && GET_CODE (target) == REG
8176 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8177 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8179 if (target)
8180 emit_clr_insn (target);
8182 op1 = gen_label_rtx ();
8183 jumpifnot (exp, op1);
8185 if (target)
8186 emit_0_to_1_insn (target);
8188 emit_label (op1);
8189 return ignore ? const0_rtx : target;
8191 case TRUTH_NOT_EXPR:
8192 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8193 /* The parser is careful to generate TRUTH_NOT_EXPR
8194 only with operands that are always zero or one. */
8195 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8196 target, 1, OPTAB_LIB_WIDEN);
8197 if (temp == 0)
8198 abort ();
8199 return temp;
8201 case COMPOUND_EXPR:
8202 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8203 emit_queue ();
8204 return expand_expr (TREE_OPERAND (exp, 1),
8205 (ignore ? const0_rtx : target),
8206 VOIDmode, 0);
8208 case COND_EXPR:
8209 /* If we would have a "singleton" (see below) were it not for a
8210 conversion in each arm, bring that conversion back out. */
8211 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8212 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8213 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8214 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8216 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8217 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8219 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8220 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8221 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8222 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8223 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8224 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8225 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8226 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8227 return expand_expr (build1 (NOP_EXPR, type,
8228 build (COND_EXPR, TREE_TYPE (iftrue),
8229 TREE_OPERAND (exp, 0),
8230 iftrue, iffalse)),
8231 target, tmode, modifier);
8235 /* Note that COND_EXPRs whose type is a structure or union
8236 are required to be constructed to contain assignments of
8237 a temporary variable, so that we can evaluate them here
8238 for side effect only. If type is void, we must do likewise. */
8240 /* If an arm of the branch requires a cleanup,
8241 only that cleanup is performed. */
8243 tree singleton = 0;
8244 tree binary_op = 0, unary_op = 0;
8246 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8247 convert it to our mode, if necessary. */
8248 if (integer_onep (TREE_OPERAND (exp, 1))
8249 && integer_zerop (TREE_OPERAND (exp, 2))
8250 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8252 if (ignore)
8254 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8255 modifier);
8256 return const0_rtx;
8259 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8260 if (GET_MODE (op0) == mode)
8261 return op0;
8263 if (target == 0)
8264 target = gen_reg_rtx (mode);
8265 convert_move (target, op0, unsignedp);
8266 return target;
8269 /* Check for X ? A + B : A. If we have this, we can copy A to the
8270 output and conditionally add B. Similarly for unary operations.
8271 Don't do this if X has side-effects because those side effects
8272 might affect A or B and the "?" operation is a sequence point in
8273 ANSI. (operand_equal_p tests for side effects.) */
8275 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8276 && operand_equal_p (TREE_OPERAND (exp, 2),
8277 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8278 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8279 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8280 && operand_equal_p (TREE_OPERAND (exp, 1),
8281 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8282 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8283 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8284 && operand_equal_p (TREE_OPERAND (exp, 2),
8285 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8286 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8287 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8288 && operand_equal_p (TREE_OPERAND (exp, 1),
8289 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8290 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8292 /* If we are not to produce a result, we have no target. Otherwise,
8293 if a target was specified use it; it will not be used as an
8294 intermediate target unless it is safe. If no target, use a
8295 temporary. */
8297 if (ignore)
8298 temp = 0;
8299 else if (original_target
8300 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8301 || (singleton && GET_CODE (original_target) == REG
8302 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8303 && original_target == var_rtx (singleton)))
8304 && GET_MODE (original_target) == mode
8305 #ifdef HAVE_conditional_move
8306 && (! can_conditionally_move_p (mode)
8307 || GET_CODE (original_target) == REG
8308 || TREE_ADDRESSABLE (type))
8309 #endif
8310 && (GET_CODE (original_target) != MEM
8311 || TREE_ADDRESSABLE (type)))
8312 temp = original_target;
8313 else if (TREE_ADDRESSABLE (type))
8314 abort ();
8315 else
8316 temp = assign_temp (type, 0, 0, 1);
8318 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8319 do the test of X as a store-flag operation, do this as
8320 A + ((X != 0) << log C). Similarly for other simple binary
8321 operators. Only do for C == 1 if BRANCH_COST is low. */
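/* Worked example, not in the original source: for `flag ? n + 4 : n',
   do_store_flag computes (flag != 0) as 0 or 1, the shift turns that
   into 0 or 4, and the final addition yields n or n + 4 without a
   branch.  */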
8322 if (temp && singleton && binary_op
8323 && (TREE_CODE (binary_op) == PLUS_EXPR
8324 || TREE_CODE (binary_op) == MINUS_EXPR
8325 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8326 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8327 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8328 : integer_onep (TREE_OPERAND (binary_op, 1)))
8329 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8331 rtx result;
8332 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8333 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8334 ? addv_optab : add_optab)
8335 : TREE_CODE (binary_op) == MINUS_EXPR
8336 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8337 ? subv_optab : sub_optab)
8338 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8339 : xor_optab);
8341 /* If we had X ? A : A + 1, do this as A + (X == 0).
8343 We have to invert the truth value here and then put it
8344 back later if do_store_flag fails. We cannot simply copy
8345 TREE_OPERAND (exp, 0) to another variable and modify that
8346 because invert_truthvalue can modify the tree pointed to
8347 by its argument. */
8348 if (singleton == TREE_OPERAND (exp, 1))
8349 TREE_OPERAND (exp, 0)
8350 = invert_truthvalue (TREE_OPERAND (exp, 0));
8352 result = do_store_flag (TREE_OPERAND (exp, 0),
8353 (safe_from_p (temp, singleton, 1)
8354 ? temp : NULL_RTX),
8355 mode, BRANCH_COST <= 1);
8357 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8358 result = expand_shift (LSHIFT_EXPR, mode, result,
8359 build_int_2 (tree_log2
8360 (TREE_OPERAND
8361 (binary_op, 1)),
8362 0),
8363 (safe_from_p (temp, singleton, 1)
8364 ? temp : NULL_RTX), 0);
8366 if (result)
8368 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8369 return expand_binop (mode, boptab, op1, result, temp,
8370 unsignedp, OPTAB_LIB_WIDEN);
8372 else if (singleton == TREE_OPERAND (exp, 1))
8373 TREE_OPERAND (exp, 0)
8374 = invert_truthvalue (TREE_OPERAND (exp, 0));
8377 do_pending_stack_adjust ();
8378 NO_DEFER_POP;
8379 op0 = gen_label_rtx ();
8381 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8383 if (temp != 0)
8385 /* If the target conflicts with the other operand of the
8386 binary op, we can't use it. Also, we can't use the target
8387 if it is a hard register, because evaluating the condition
8388 might clobber it. */
8389 if ((binary_op
8390 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8391 || (GET_CODE (temp) == REG
8392 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8393 temp = gen_reg_rtx (mode);
8394 store_expr (singleton, temp, 0);
8396 else
8397 expand_expr (singleton,
8398 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8399 if (singleton == TREE_OPERAND (exp, 1))
8400 jumpif (TREE_OPERAND (exp, 0), op0);
8401 else
8402 jumpifnot (TREE_OPERAND (exp, 0), op0);
8404 start_cleanup_deferral ();
8405 if (binary_op && temp == 0)
8406 /* Just touch the other operand. */
8407 expand_expr (TREE_OPERAND (binary_op, 1),
8408 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8409 else if (binary_op)
8410 store_expr (build (TREE_CODE (binary_op), type,
8411 make_tree (type, temp),
8412 TREE_OPERAND (binary_op, 1)),
8413 temp, 0);
8414 else
8415 store_expr (build1 (TREE_CODE (unary_op), type,
8416 make_tree (type, temp)),
8417 temp, 0);
8418 op1 = op0;
8420 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8421 comparison operator. If we have one of these cases, set the
8422 output to A, branch on A (cse will merge these two references),
8423 then set the output to FOO. */
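/* For example (illustrative): `x > 0 ? x : 0' matches this pattern:
   x is stored into the target, the branch tests x > 0, and the
   fall-through path overwrites the target with 0.  */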
8424 else if (temp
8425 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8426 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8427 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8428 TREE_OPERAND (exp, 1), 0)
8429 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8430 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8431 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8433 if (GET_CODE (temp) == REG
8434 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8435 temp = gen_reg_rtx (mode);
8436 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8437 jumpif (TREE_OPERAND (exp, 0), op0);
8439 start_cleanup_deferral ();
8440 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8441 op1 = op0;
8443 else if (temp
8444 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8445 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8446 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8447 TREE_OPERAND (exp, 2), 0)
8448 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8449 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8450 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8452 if (GET_CODE (temp) == REG
8453 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8454 temp = gen_reg_rtx (mode);
8455 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8456 jumpifnot (TREE_OPERAND (exp, 0), op0);
8458 start_cleanup_deferral ();
8459 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8460 op1 = op0;
8462 else
8464 op1 = gen_label_rtx ();
8465 jumpifnot (TREE_OPERAND (exp, 0), op0);
8467 start_cleanup_deferral ();
8469 /* One branch of the cond can be void, if it never returns. For
8470 example A ? throw : E */
8471 if (temp != 0
8472 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8473 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8474 else
8475 expand_expr (TREE_OPERAND (exp, 1),
8476 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8477 end_cleanup_deferral ();
8478 emit_queue ();
8479 emit_jump_insn (gen_jump (op1));
8480 emit_barrier ();
8481 emit_label (op0);
8482 start_cleanup_deferral ();
8483 if (temp != 0
8484 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8485 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8486 else
8487 expand_expr (TREE_OPERAND (exp, 2),
8488 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8491 end_cleanup_deferral ();
8493 emit_queue ();
8494 emit_label (op1);
8495 OK_DEFER_POP;
8497 return temp;
8500 case TARGET_EXPR:
8502 /* Something needs to be initialized, but we didn't know
8503 where that thing was when building the tree. For example,
8504 it could be the return value of a function, or a parameter
8505 to a function which is laid down on the stack, or a temporary
8506 variable which must be passed by reference.
8508 We guarantee that the expression will either be constructed
8509 or copied into our original target. */
8511 tree slot = TREE_OPERAND (exp, 0);
8512 tree cleanups = NULL_TREE;
8513 tree exp1;
8515 if (TREE_CODE (slot) != VAR_DECL)
8516 abort ();
8518 if (! ignore)
8519 target = original_target;
8521 /* Set this here so that if we get a target that refers to a
8522 register variable that's already been used, put_reg_into_stack
8523 knows that it should fix up those uses. */
8524 TREE_USED (slot) = 1;
8526 if (target == 0)
8528 if (DECL_RTL_SET_P (slot))
8530 target = DECL_RTL (slot);
8531 /* If we have already expanded the slot, don't do
8532 it again. (mrs) */
8533 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8534 return target;
8536 else
8538 target = assign_temp (type, 2, 0, 1);
8539 /* All temp slots at this level must not conflict. */
8540 preserve_temp_slots (target);
8541 SET_DECL_RTL (slot, target);
8542 if (TREE_ADDRESSABLE (slot))
8543 put_var_into_stack (slot);
8545 /* Since SLOT is not known to the called function
8546 to belong to its stack frame, we must build an explicit
8547 cleanup. This case occurs when we must build up a reference
8548 to pass the reference as an argument. In this case,
8549 it is very likely that such a reference need not be
8550 built here. */
8552 if (TREE_OPERAND (exp, 2) == 0)
8553 TREE_OPERAND (exp, 2)
8554 = (*lang_hooks.maybe_build_cleanup) (slot);
8555 cleanups = TREE_OPERAND (exp, 2);
8558 else
8560 /* This case does occur when expanding a parameter which
8561 needs to be constructed on the stack. The target
8562 is the actual stack address that we want to initialize.
8563 The function we call will perform the cleanup in this case. */
8565 /* If we have already assigned it space, use that space,
8566 not target that we were passed in, as our target
8567 parameter is only a hint. */
8568 if (DECL_RTL_SET_P (slot))
8570 target = DECL_RTL (slot);
8571 /* If we have already expanded the slot, don't do
8572 it again. (mrs) */
8573 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8574 return target;
8576 else
8578 SET_DECL_RTL (slot, target);
8579 /* If we must have an addressable slot, then make sure that
8580 the RTL that we just stored in slot is OK. */
8581 if (TREE_ADDRESSABLE (slot))
8582 put_var_into_stack (slot);
8586 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8587 /* Mark it as expanded. */
8588 TREE_OPERAND (exp, 1) = NULL_TREE;
8590 store_expr (exp1, target, 0);
8592 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8594 return target;
8597 case INIT_EXPR:
8599 tree lhs = TREE_OPERAND (exp, 0);
8600 tree rhs = TREE_OPERAND (exp, 1);
8602 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8603 return temp;
8606 case MODIFY_EXPR:
8608 /* If lhs is complex, expand calls in rhs before computing it.
8609 That's so we don't compute a pointer and save it over a
8610 call. If lhs is simple, compute it first so we can give it
8611 as a target if the rhs is just a call. This avoids an
8612 extra temp and copy and that prevents a partial-subsumption
8613 which makes bad code. Actually we could treat
8614 component_ref's of vars like vars. */
8616 tree lhs = TREE_OPERAND (exp, 0);
8617 tree rhs = TREE_OPERAND (exp, 1);
8619 temp = 0;
8621 /* Check for |= or &= of a bitfield of size one into another bitfield
8622 of size 1. In this case, (unless we need the result of the
8623 assignment) we can do this more efficiently with a
8624 test followed by an assignment, if necessary.
8626 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8627 things change so we do, this code should be enhanced to
8628 support it. */
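/* For example (illustrative): with one-bit fields `a' and `b', the
   statement `s.a |= s.b;' is emitted as a test of s.b that jumps over
   a store of 1 into s.a when s.b is zero.  */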
8629 if (ignore
8630 && TREE_CODE (lhs) == COMPONENT_REF
8631 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8632 || TREE_CODE (rhs) == BIT_AND_EXPR)
8633 && TREE_OPERAND (rhs, 0) == lhs
8634 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8635 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8636 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8638 rtx label = gen_label_rtx ();
8640 do_jump (TREE_OPERAND (rhs, 1),
8641 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8642 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8643 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8644 (TREE_CODE (rhs) == BIT_IOR_EXPR
8645 ? integer_one_node
8646 : integer_zero_node)),
8647 0, 0);
8648 do_pending_stack_adjust ();
8649 emit_label (label);
8650 return const0_rtx;
8653 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8655 return temp;
8658 case RETURN_EXPR:
8659 if (!TREE_OPERAND (exp, 0))
8660 expand_null_return ();
8661 else
8662 expand_return (TREE_OPERAND (exp, 0));
8663 return const0_rtx;
8665 case PREINCREMENT_EXPR:
8666 case PREDECREMENT_EXPR:
8667 return expand_increment (exp, 0, ignore);
8669 case POSTINCREMENT_EXPR:
8670 case POSTDECREMENT_EXPR:
8671 /* Faster to treat as pre-increment if result is not used. */
8672 return expand_increment (exp, ! ignore, ignore);
8674 case ADDR_EXPR:
8675 /* Are we taking the address of a nested function? */
8676 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8677 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8678 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8679 && ! TREE_STATIC (exp))
8681 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8682 op0 = force_operand (op0, target);
8684 /* If we are taking the address of something erroneous, just
8685 return a zero. */
8686 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8687 return const0_rtx;
8688 /* If we are taking the address of a constant and are at the
8689 top level, we have to use output_constant_def since we can't
8690 call force_const_mem at top level. */
8691 else if (cfun == 0
8692 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8693 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8694 == 'c')))
8695 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8696 else
8698 /* We make sure to pass const0_rtx down if we came in with
8699 ignore set, to avoid doing the cleanups twice for something. */
8700 op0 = expand_expr (TREE_OPERAND (exp, 0),
8701 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8702 (modifier == EXPAND_INITIALIZER
8703 ? modifier : EXPAND_CONST_ADDRESS));
8705 /* If we are going to ignore the result, OP0 will have been set
8706 to const0_rtx, so just return it. Don't get confused and
8707 think we are taking the address of the constant. */
8708 if (ignore)
8709 return op0;
8711 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8712 clever and returns a REG when given a MEM. */
8713 op0 = protect_from_queue (op0, 1);
8715 /* We would like the object in memory. If it is a constant, we can
8716 have it be statically allocated into memory. For a non-constant,
8717 we need to allocate some memory and store the value into it. */
8719 if (CONSTANT_P (op0))
8720 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8721 op0);
8722 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8723 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8724 || GET_CODE (op0) == PARALLEL)
8726 /* If the operand is a SAVE_EXPR, we can deal with this by
8727 forcing the SAVE_EXPR into memory. */
8728 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8730 put_var_into_stack (TREE_OPERAND (exp, 0));
8731 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8733 else
8735 /* If this object is in a register, it can't be BLKmode. */
8736 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8737 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8739 if (GET_CODE (op0) == PARALLEL)
8740 /* Handle calls that pass values in multiple
8741 non-contiguous locations. The Irix 6 ABI has examples
8742 of this. */
8743 emit_group_store (memloc, op0,
8744 int_size_in_bytes (inner_type));
8745 else
8746 emit_move_insn (memloc, op0);
8748 op0 = memloc;
8752 if (GET_CODE (op0) != MEM)
8753 abort ();
8755 mark_temp_addr_taken (op0);
8756 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8758 op0 = XEXP (op0, 0);
8759 #ifdef POINTERS_EXTEND_UNSIGNED
8760 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8761 && mode == ptr_mode)
8762 op0 = convert_memory_address (ptr_mode, op0);
8763 #endif
8764 return op0;
8767 /* If OP0 is not aligned at least as much as the type requires, we
8768 need to make a temporary, copy OP0 to it, and take the address of
8769 the temporary. We want to use the alignment of the type, not of
8770 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8771 the test for BLKmode means that can't happen. The test for
8772 BLKmode is because we never make mis-aligned MEMs with a
8773 non-BLKmode mode.
8775 We don't need to do this at all if the machine doesn't have
8776 strict alignment. */
8777 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8778 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8779 > MEM_ALIGN (op0))
8780 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8782 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8783 rtx new
8784 = assign_stack_temp_for_type
8785 (TYPE_MODE (inner_type),
8786 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8787 : int_size_in_bytes (inner_type),
8788 1, build_qualified_type (inner_type,
8789 (TYPE_QUALS (inner_type)
8790 | TYPE_QUAL_CONST)));
8792 if (TYPE_ALIGN_OK (inner_type))
8793 abort ();
8795 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8796 op0 = new;
8799 op0 = force_operand (XEXP (op0, 0), target);
8802 if (flag_force_addr
8803 && GET_CODE (op0) != REG
8804 && modifier != EXPAND_CONST_ADDRESS
8805 && modifier != EXPAND_INITIALIZER
8806 && modifier != EXPAND_SUM)
8807 op0 = force_reg (Pmode, op0);
8809 if (GET_CODE (op0) == REG
8810 && ! REG_USERVAR_P (op0))
8811 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8813 #ifdef POINTERS_EXTEND_UNSIGNED
8814 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8815 && mode == ptr_mode)
8816 op0 = convert_memory_address (ptr_mode, op0);
8817 #endif
8819 return op0;
8821 case ENTRY_VALUE_EXPR:
8822 abort ();
8824 /* COMPLEX type for Extended Pascal & Fortran */
8825 case COMPLEX_EXPR:
8827 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8828 rtx insns;
8830 /* Get the rtx code of the operands. */
8831 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8832 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8834 if (! target)
8835 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8837 start_sequence ();
8839 /* Move the real (op0) and imaginary (op1) parts to their location. */
8840 emit_move_insn (gen_realpart (mode, target), op0);
8841 emit_move_insn (gen_imagpart (mode, target), op1);
8843 insns = get_insns ();
8844 end_sequence ();
8846 /* Complex construction should appear as a single unit. */
8847 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8848 each with a separate pseudo as destination.
8849 It's not correct for flow to treat them as a unit. */
8850 if (GET_CODE (target) != CONCAT)
8851 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8852 else
8853 emit_insns (insns);
8855 return target;
8858 case REALPART_EXPR:
8859 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8860 return gen_realpart (mode, op0);
8862 case IMAGPART_EXPR:
8863 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8864 return gen_imagpart (mode, op0);
8866 case CONJ_EXPR:
8868 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8869 rtx imag_t;
8870 rtx insns;
8872 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8874 if (! target)
8875 target = gen_reg_rtx (mode);
8877 start_sequence ();
8879 /* Store the realpart and the negated imagpart to target. */
8880 emit_move_insn (gen_realpart (partmode, target),
8881 gen_realpart (partmode, op0));
8883 imag_t = gen_imagpart (partmode, target);
8884 temp = expand_unop (partmode,
8885 ! unsignedp && flag_trapv
8886 && (GET_MODE_CLASS(partmode) == MODE_INT)
8887 ? negv_optab : neg_optab,
8888 gen_imagpart (partmode, op0), imag_t, 0);
8889 if (temp != imag_t)
8890 emit_move_insn (imag_t, temp);
8892 insns = get_insns ();
8893 end_sequence ();
8895 /* Conjugate should appear as a single unit.
8896 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8897 each with a separate pseudo as destination.
8898 It's not correct for flow to treat them as a unit. */
8899 if (GET_CODE (target) != CONCAT)
8900 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8901 else
8902 emit_insns (insns);
8904 return target;
8907 case TRY_CATCH_EXPR:
8909 tree handler = TREE_OPERAND (exp, 1);
8911 expand_eh_region_start ();
8913 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8915 expand_eh_region_end_cleanup (handler);
8917 return op0;
8920 case TRY_FINALLY_EXPR:
8922 tree try_block = TREE_OPERAND (exp, 0);
8923 tree finally_block = TREE_OPERAND (exp, 1);
8924 rtx finally_label = gen_label_rtx ();
8925 rtx done_label = gen_label_rtx ();
8926 rtx return_link = gen_reg_rtx (Pmode);
8927 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8928 (tree) finally_label, (tree) return_link);
8929 TREE_SIDE_EFFECTS (cleanup) = 1;
8931 /* Start a new binding layer that will keep track of all cleanup
8932 actions to be performed. */
8933 expand_start_bindings (2);
8935 target_temp_slot_level = temp_slot_level;
8937 expand_decl_cleanup (NULL_TREE, cleanup);
8938 op0 = expand_expr (try_block, target, tmode, modifier);
8940 preserve_temp_slots (op0);
8941 expand_end_bindings (NULL_TREE, 0, 0);
8942 emit_jump (done_label);
8943 emit_label (finally_label);
8944 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8945 emit_indirect_jump (return_link);
8946 emit_label (done_label);
8947 return op0;
8950 case GOTO_SUBROUTINE_EXPR:
8952 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8953 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8954 rtx return_address = gen_label_rtx ();
8955 emit_move_insn (return_link,
8956 gen_rtx_LABEL_REF (Pmode, return_address));
8957 emit_jump (subr);
8958 emit_label (return_address);
8959 return const0_rtx;
8962 case VA_ARG_EXPR:
8963 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8965 case EXC_PTR_EXPR:
8966 return get_exception_pointer (cfun);
8968 case FDESC_EXPR:
8969 /* Function descriptors are not valid except for as
8970 initialization constants, and should not be expanded. */
8971 abort ();
8973 default:
8974 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
8977 /* Here to do an ordinary binary operator, generating an instruction
8978 from the optab already placed in `this_optab'. */
8979 binop:
8980 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8981 subtarget = 0;
8982 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8983 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8984 binop2:
8985 temp = expand_binop (mode, this_optab, op0, op1, target,
8986 unsignedp, OPTAB_LIB_WIDEN);
8987 if (temp == 0)
8988 abort ();
8989 return temp;
8992 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8993 when applied to the address of EXP produces an address known to be
8994 aligned more than BIGGEST_ALIGNMENT. */
8996 static int
8997 is_aligning_offset (offset, exp)
8998 tree offset;
8999 tree exp;
9001 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9002 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9003 || TREE_CODE (offset) == NOP_EXPR
9004 || TREE_CODE (offset) == CONVERT_EXPR
9005 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9006 offset = TREE_OPERAND (offset, 0);
9008 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9009 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9010 if (TREE_CODE (offset) != BIT_AND_EXPR
9011 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9012 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9013 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9014 return 0;
9016 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9017 It must be NEGATE_EXPR. Then strip any more conversions. */
9018 offset = TREE_OPERAND (offset, 0);
9019 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9020 || TREE_CODE (offset) == NOP_EXPR
9021 || TREE_CODE (offset) == CONVERT_EXPR)
9022 offset = TREE_OPERAND (offset, 0);
9024 if (TREE_CODE (offset) != NEGATE_EXPR)
9025 return 0;
9027 offset = TREE_OPERAND (offset, 0);
9028 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9029 || TREE_CODE (offset) == NOP_EXPR
9030 || TREE_CODE (offset) == CONVERT_EXPR)
9031 offset = TREE_OPERAND (offset, 0);
9033 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9034 whose type is the same as EXP. */
9035 return (TREE_CODE (offset) == ADDR_EXPR
9036 && (TREE_OPERAND (offset, 0) == exp
9037 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9038 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9039 == TREE_TYPE (exp)))));
9042 /* Return the tree node if an ARG corresponds to a string constant or zero
9043 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9044 in bytes within the string that ARG is accessing. The type of the
9045 offset will be `sizetype'. */
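/* Illustrative example, not part of the original source: for an
   argument tree representing `"hello" + 2' (a PLUS_EXPR whose first
   operand is the address of a STRING_CST), this returns the
   STRING_CST and sets *PTR_OFFSET to the sizetype constant 2.  */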
9047 tree
9048 string_constant (arg, ptr_offset)
9049 tree arg;
9050 tree *ptr_offset;
9052 STRIP_NOPS (arg);
9054 if (TREE_CODE (arg) == ADDR_EXPR
9055 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9057 *ptr_offset = size_zero_node;
9058 return TREE_OPERAND (arg, 0);
9060 else if (TREE_CODE (arg) == PLUS_EXPR)
9062 tree arg0 = TREE_OPERAND (arg, 0);
9063 tree arg1 = TREE_OPERAND (arg, 1);
9065 STRIP_NOPS (arg0);
9066 STRIP_NOPS (arg1);
9068 if (TREE_CODE (arg0) == ADDR_EXPR
9069 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9071 *ptr_offset = convert (sizetype, arg1);
9072 return TREE_OPERAND (arg0, 0);
9074 else if (TREE_CODE (arg1) == ADDR_EXPR
9075 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9077 *ptr_offset = convert (sizetype, arg0);
9078 return TREE_OPERAND (arg1, 0);
9082 return 0;
9085 /* Expand code for a post- or pre- increment or decrement
9086 and return the RTX for the result.
9087 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
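/* E.g. (illustrative): for `y = x++;' this is called with POST == 1 and
   returns an rtx holding the old value of X, whereas for `y = ++x;' it is
   called with POST == 0 and returns the incremented value.  */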
9089 static rtx
9090 expand_increment (exp, post, ignore)
9091 tree exp;
9092 int post, ignore;
9094 rtx op0, op1;
9095 rtx temp, value;
9096 tree incremented = TREE_OPERAND (exp, 0);
9097 optab this_optab = add_optab;
9098 int icode;
9099 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9100 int op0_is_copy = 0;
9101 int single_insn = 0;
9102 /* 1 means we can't store into OP0 directly,
9103 because it is a subreg narrower than a word,
9104 and we don't dare clobber the rest of the word. */
9105 int bad_subreg = 0;
9107 /* Stabilize any component ref that might need to be
9108 evaluated more than once below. */
9109 if (!post
9110 || TREE_CODE (incremented) == BIT_FIELD_REF
9111 || (TREE_CODE (incremented) == COMPONENT_REF
9112 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9113 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9114 incremented = stabilize_reference (incremented);
9115 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9116 ones into save exprs so that they don't accidentally get evaluated
9117 more than once by the code below. */
9118 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9119 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9120 incremented = save_expr (incremented);
9122 /* Compute the operands as RTX.
9123 Note whether OP0 is the actual lvalue or a copy of it:
9124 I believe it is a copy iff it is a register or subreg
9125 and insns were generated in computing it. */
9127 temp = get_last_insn ();
9128 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9130 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9131 in place but instead must do sign- or zero-extension during assignment,
9132 so we copy it into a new register and let the code below use it as
9133 a copy.
9135 Note that we can safely modify this SUBREG since it is known not to be
9136 shared (it was made by the expand_expr call above). */
9138 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9140 if (post)
9141 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9142 else
9143 bad_subreg = 1;
9145 else if (GET_CODE (op0) == SUBREG
9146 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9148 /* We cannot increment this SUBREG in place. If we are
9149 post-incrementing, get a copy of the old value. Otherwise,
9150 just mark that we cannot increment in place. */
9151 if (post)
9152 op0 = copy_to_reg (op0);
9153 else
9154 bad_subreg = 1;
9157 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9158 && temp != get_last_insn ());
9159 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9161 /* Decide whether incrementing or decrementing. */
9162 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9163 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9164 this_optab = sub_optab;
9166 /* Convert decrement by a constant into a negative increment. */
9167 if (this_optab == sub_optab
9168 && GET_CODE (op1) == CONST_INT)
9170 op1 = GEN_INT (-INTVAL (op1));
9171 this_optab = add_optab;
9174 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9175 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9177 /* For a preincrement, see if we can do this with a single instruction. */
9178 if (!post)
9180 icode = (int) this_optab->handlers[(int) mode].insn_code;
9181 if (icode != (int) CODE_FOR_nothing
9182 /* Make sure that OP0 is valid for operands 0 and 1
9183 of the insn we want to queue. */
9184 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9185 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9186 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9187 single_insn = 1;
9190 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9191 then we cannot just increment OP0. We must therefore contrive to
9192 increment the original value. Then, for postincrement, we can return
9193 OP0 since it is a copy of the old value. For preincrement, expand here
9194 unless we can do it with a single insn.
9196 Likewise if storing directly into OP0 would clobber high bits
9197 we need to preserve (bad_subreg). */
9198 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9200 /* This is the easiest way to increment the value wherever it is.
9201 Problems with multiple evaluation of INCREMENTED are prevented
9202 because either (1) it is a component_ref or preincrement,
9203 in which case it was stabilized above, or (2) it is an array_ref
9204 with constant index in an array in a register, which is
9205 safe to reevaluate. */
9206 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9207 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9208 ? MINUS_EXPR : PLUS_EXPR),
9209 TREE_TYPE (exp),
9210 incremented,
9211 TREE_OPERAND (exp, 1));
9213 while (TREE_CODE (incremented) == NOP_EXPR
9214 || TREE_CODE (incremented) == CONVERT_EXPR)
9216 newexp = convert (TREE_TYPE (incremented), newexp);
9217 incremented = TREE_OPERAND (incremented, 0);
9220 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9221 return post ? op0 : temp;
9224 if (post)
9226 /* We have a true reference to the value in OP0.
9227 If there is an insn to add or subtract in this mode, queue it.
9228 Queueing the increment insn avoids the register shuffling
9229 that often results if we must increment now and first save
9230 the old value for subsequent use. */
9232 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9233 op0 = stabilize (op0);
9234 #endif
9236 icode = (int) this_optab->handlers[(int) mode].insn_code;
9237 if (icode != (int) CODE_FOR_nothing
9238 /* Make sure that OP0 is valid for operands 0 and 1
9239 of the insn we want to queue. */
9240 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9241 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9243 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9244 op1 = force_reg (mode, op1);
9246 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9248 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9250 rtx addr = (general_operand (XEXP (op0, 0), mode)
9251 ? force_reg (Pmode, XEXP (op0, 0))
9252 : copy_to_reg (XEXP (op0, 0)));
9253 rtx temp, result;
9255 op0 = replace_equiv_address (op0, addr);
9256 temp = force_reg (GET_MODE (op0), op0);
9257 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9258 op1 = force_reg (mode, op1);
9260 /* The increment queue is LIFO, thus we have to `queue'
9261 the instructions in reverse order. */
9262 enqueue_insn (op0, gen_move_insn (op0, temp));
9263 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9264 return result;
9268 /* Preincrement, or we can't increment with one simple insn. */
9269 if (post)
9270 /* Save a copy of the value before inc or dec, to return it later. */
9271 temp = value = copy_to_reg (op0);
9272 else
9273 /* Arrange to return the incremented value. */
9274 /* Copy the rtx because expand_binop will protect from the queue,
9275 and the results of that would be invalid for us to return
9276 if our caller does emit_queue before using our result. */
9277 temp = copy_rtx (value = op0);
9279 /* Increment however we can. */
9280 op1 = expand_binop (mode, this_optab, value, op1, op0,
9281 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9283 /* Make sure the value is stored into OP0. */
9284 if (op1 != op0)
9285 emit_move_insn (op0, op1);
9287 return temp;
9290 /* At the start of a function, record that we have no previously-pushed
9291 arguments waiting to be popped. */
9293 void
9294 init_pending_stack_adjust ()
9296 pending_stack_adjust = 0;
9299 /* When exiting from a function, if safe, clear out any pending stack adjust
9300 so the adjustment won't get done.
9302 Note, if the current function calls alloca, then it must have a
9303 frame pointer regardless of the value of flag_omit_frame_pointer. */
9305 void
9306 clear_pending_stack_adjust ()
9308 #ifdef EXIT_IGNORE_STACK
9309 if (optimize > 0
9310 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9311 && EXIT_IGNORE_STACK
9312 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9313 && ! flag_inline_functions)
9315 stack_pointer_delta -= pending_stack_adjust,
9316 pending_stack_adjust = 0;
9318 #endif
9321 /* Pop any previously-pushed arguments that have not been popped yet. */
9323 void
9324 do_pending_stack_adjust ()
9326 if (inhibit_defer_pop == 0)
9328 if (pending_stack_adjust != 0)
9329 adjust_stack (GEN_INT (pending_stack_adjust));
9330 pending_stack_adjust = 0;
9334 /* Expand conditional expressions. */
9336 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9337 LABEL is an rtx of code CODE_LABEL, in this function and all the
9338 functions here. */
9340 void
9341 jumpifnot (exp, label)
9342 tree exp;
9343 rtx label;
9345 do_jump (exp, label, NULL_RTX);
9348 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9350 void
9351 jumpif (exp, label)
9352 tree exp;
9353 rtx label;
9355 do_jump (exp, NULL_RTX, label);
9358 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9359 the result is zero, or IF_TRUE_LABEL if the result is one.
9360 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9361 meaning fall through in that case.
9363 do_jump always does any pending stack adjust except when it does not
9364 actually perform a jump. An example where there is no jump
9365 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9367 This function is responsible for optimizing cases such as
9368 &&, || and comparison operators in EXP. */
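/* A sketch of the effect (illustrative only): for `if (a && b) f ();' the
   TRUTH_ANDIF_EXPR case below first emits a jump past the call when A is
   zero, and only then evaluates and tests B, so B is never evaluated when
   A turns out to be false.  */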
9370 void
9371 do_jump (exp, if_false_label, if_true_label)
9372 tree exp;
9373 rtx if_false_label, if_true_label;
9375 enum tree_code code = TREE_CODE (exp);
9376 /* Some cases need to create a label to jump to
9377 in order to properly fall through.
9378 These cases set DROP_THROUGH_LABEL nonzero. */
9379 rtx drop_through_label = 0;
9380 rtx temp;
9381 int i;
9382 tree type;
9383 enum machine_mode mode;
9385 #ifdef MAX_INTEGER_COMPUTATION_MODE
9386 check_max_integer_computation_mode (exp);
9387 #endif
9389 emit_queue ();
9391 switch (code)
9393 case ERROR_MARK:
9394 break;
9396 case INTEGER_CST:
9397 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9398 if (temp)
9399 emit_jump (temp);
9400 break;
9402 #if 0
9403 /* This is not true with #pragma weak */
9404 case ADDR_EXPR:
9405 /* The address of something can never be zero. */
9406 if (if_true_label)
9407 emit_jump (if_true_label);
9408 break;
9409 #endif
9411 case NOP_EXPR:
9412 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9413 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9414 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9415 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9416 goto normal;
9417 case CONVERT_EXPR:
9418 /* If we are narrowing the operand, we have to do the compare in the
9419 narrower mode. */
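/* E.g. (illustrative): `if ((char) i)' must test only the low 8 bits of I,
   so this case goes to `normal' and the comparison is done in the narrower
   mode rather than looking through the conversion.  */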
9420 if ((TYPE_PRECISION (TREE_TYPE (exp))
9421 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9422 goto normal;
9423 case NON_LVALUE_EXPR:
9424 case REFERENCE_EXPR:
9425 case ABS_EXPR:
9426 case NEGATE_EXPR:
9427 case LROTATE_EXPR:
9428 case RROTATE_EXPR:
9429 /* These cannot change zero->non-zero or vice versa. */
9430 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9431 break;
9433 case WITH_RECORD_EXPR:
9434 /* Put the object on the placeholder list, recurse through our first
9435 operand, and pop the list. */
9436 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9437 placeholder_list);
9438 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9439 placeholder_list = TREE_CHAIN (placeholder_list);
9440 break;
9442 #if 0
9443 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9444 a test and can be longer if the test is eliminated. */
9445 case PLUS_EXPR:
9446 /* Reduce to minus. */
9447 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9448 TREE_OPERAND (exp, 0),
9449 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9450 TREE_OPERAND (exp, 1))));
9451 /* Process as MINUS. */
9452 #endif
9454 case MINUS_EXPR:
9455 /* Non-zero iff operands of minus differ. */
9456 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9457 TREE_OPERAND (exp, 0),
9458 TREE_OPERAND (exp, 1)),
9459 NE, NE, if_false_label, if_true_label);
9460 break;
9462 case BIT_AND_EXPR:
9463 /* If we are AND'ing with a small constant, do this comparison in the
9464 smallest type that fits. If the machine doesn't have comparisons
9465 that small, it will be converted back to the wider comparison.
9466 This helps if we are testing the sign bit of a narrower object.
9467 combine can't do this for us because it can't know whether a
9468 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
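/* For example (illustrative only): on a target without SLOW_BYTE_ACCESS,
   a test like `if (x & 0x80)' with X an int can be done as a QImode
   comparison, since tree_floor_log2 (0x80) is 7 and mode_for_size then
   asks for an 8-bit integer mode.  */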
9470 if (! SLOW_BYTE_ACCESS
9471 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9472 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9473 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9474 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9475 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9476 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9477 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9478 != CODE_FOR_nothing))
9480 do_jump (convert (type, exp), if_false_label, if_true_label);
9481 break;
9483 goto normal;
9485 case TRUTH_NOT_EXPR:
9486 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9487 break;
9489 case TRUTH_ANDIF_EXPR:
9490 if (if_false_label == 0)
9491 if_false_label = drop_through_label = gen_label_rtx ();
9492 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9493 start_cleanup_deferral ();
9494 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9495 end_cleanup_deferral ();
9496 break;
9498 case TRUTH_ORIF_EXPR:
9499 if (if_true_label == 0)
9500 if_true_label = drop_through_label = gen_label_rtx ();
9501 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9502 start_cleanup_deferral ();
9503 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9504 end_cleanup_deferral ();
9505 break;
9507 case COMPOUND_EXPR:
9508 push_temp_slots ();
9509 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9510 preserve_temp_slots (NULL_RTX);
9511 free_temp_slots ();
9512 pop_temp_slots ();
9513 emit_queue ();
9514 do_pending_stack_adjust ();
9515 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9516 break;
9518 case COMPONENT_REF:
9519 case BIT_FIELD_REF:
9520 case ARRAY_REF:
9521 case ARRAY_RANGE_REF:
9523 HOST_WIDE_INT bitsize, bitpos;
9524 int unsignedp;
9525 enum machine_mode mode;
9526 tree type;
9527 tree offset;
9528 int volatilep = 0;
9530 /* Get description of this reference. We don't actually care
9531 about the underlying object here. */
9532 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9533 &unsignedp, &volatilep);
9535 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9536 if (! SLOW_BYTE_ACCESS
9537 && type != 0 && bitsize >= 0
9538 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9539 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9540 != CODE_FOR_nothing))
9542 do_jump (convert (type, exp), if_false_label, if_true_label);
9543 break;
9545 goto normal;
9548 case COND_EXPR:
9549 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9550 if (integer_onep (TREE_OPERAND (exp, 1))
9551 && integer_zerop (TREE_OPERAND (exp, 2)))
9552 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9554 else if (integer_zerop (TREE_OPERAND (exp, 1))
9555 && integer_onep (TREE_OPERAND (exp, 2)))
9556 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9558 else
9560 rtx label1 = gen_label_rtx ();
9561 drop_through_label = gen_label_rtx ();
9563 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9565 start_cleanup_deferral ();
9566 /* Now the THEN-expression. */
9567 do_jump (TREE_OPERAND (exp, 1),
9568 if_false_label ? if_false_label : drop_through_label,
9569 if_true_label ? if_true_label : drop_through_label);
9570 /* In case the do_jump just above never jumps. */
9571 do_pending_stack_adjust ();
9572 emit_label (label1);
9574 /* Now the ELSE-expression. */
9575 do_jump (TREE_OPERAND (exp, 2),
9576 if_false_label ? if_false_label : drop_through_label,
9577 if_true_label ? if_true_label : drop_through_label);
9578 end_cleanup_deferral ();
9580 break;
9582 case EQ_EXPR:
9584 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9586 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9587 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9589 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9590 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9591 do_jump
9592 (fold
9593 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9594 fold (build (EQ_EXPR, TREE_TYPE (exp),
9595 fold (build1 (REALPART_EXPR,
9596 TREE_TYPE (inner_type),
9597 exp0)),
9598 fold (build1 (REALPART_EXPR,
9599 TREE_TYPE (inner_type),
9600 exp1)))),
9601 fold (build (EQ_EXPR, TREE_TYPE (exp),
9602 fold (build1 (IMAGPART_EXPR,
9603 TREE_TYPE (inner_type),
9604 exp0)),
9605 fold (build1 (IMAGPART_EXPR,
9606 TREE_TYPE (inner_type),
9607 exp1)))))),
9608 if_false_label, if_true_label);
9611 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9612 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9614 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9615 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9616 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9617 else
9618 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9619 break;
9622 case NE_EXPR:
9624 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9626 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9627 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9629 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9630 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9631 do_jump
9632 (fold
9633 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9634 fold (build (NE_EXPR, TREE_TYPE (exp),
9635 fold (build1 (REALPART_EXPR,
9636 TREE_TYPE (inner_type),
9637 exp0)),
9638 fold (build1 (REALPART_EXPR,
9639 TREE_TYPE (inner_type),
9640 exp1)))),
9641 fold (build (NE_EXPR, TREE_TYPE (exp),
9642 fold (build1 (IMAGPART_EXPR,
9643 TREE_TYPE (inner_type),
9644 exp0)),
9645 fold (build1 (IMAGPART_EXPR,
9646 TREE_TYPE (inner_type),
9647 exp1)))))),
9648 if_false_label, if_true_label);
9651 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9652 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9654 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9655 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9656 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9657 else
9658 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9659 break;
9662 case LT_EXPR:
9663 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9664 if (GET_MODE_CLASS (mode) == MODE_INT
9665 && ! can_compare_p (LT, mode, ccp_jump))
9666 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9667 else
9668 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9669 break;
9671 case LE_EXPR:
9672 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9673 if (GET_MODE_CLASS (mode) == MODE_INT
9674 && ! can_compare_p (LE, mode, ccp_jump))
9675 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9676 else
9677 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9678 break;
9680 case GT_EXPR:
9681 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9682 if (GET_MODE_CLASS (mode) == MODE_INT
9683 && ! can_compare_p (GT, mode, ccp_jump))
9684 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9685 else
9686 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9687 break;
9689 case GE_EXPR:
9690 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9691 if (GET_MODE_CLASS (mode) == MODE_INT
9692 && ! can_compare_p (GE, mode, ccp_jump))
9693 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9694 else
9695 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9696 break;
9698 case UNORDERED_EXPR:
9699 case ORDERED_EXPR:
9701 enum rtx_code cmp, rcmp;
9702 int do_rev;
9704 if (code == UNORDERED_EXPR)
9705 cmp = UNORDERED, rcmp = ORDERED;
9706 else
9707 cmp = ORDERED, rcmp = UNORDERED;
9708 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9710 do_rev = 0;
9711 if (! can_compare_p (cmp, mode, ccp_jump)
9712 && (can_compare_p (rcmp, mode, ccp_jump)
9713 /* If the target doesn't provide either UNORDERED or ORDERED
9714 comparisons, canonicalize on UNORDERED for the library. */
9715 || rcmp == UNORDERED))
9716 do_rev = 1;
9718 if (! do_rev)
9719 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9720 else
9721 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9723 break;
9726 enum rtx_code rcode1;
9727 enum tree_code tcode2;
9729 case UNLT_EXPR:
9730 rcode1 = UNLT;
9731 tcode2 = LT_EXPR;
9732 goto unordered_bcc;
9733 case UNLE_EXPR:
9734 rcode1 = UNLE;
9735 tcode2 = LE_EXPR;
9736 goto unordered_bcc;
9737 case UNGT_EXPR:
9738 rcode1 = UNGT;
9739 tcode2 = GT_EXPR;
9740 goto unordered_bcc;
9741 case UNGE_EXPR:
9742 rcode1 = UNGE;
9743 tcode2 = GE_EXPR;
9744 goto unordered_bcc;
9745 case UNEQ_EXPR:
9746 rcode1 = UNEQ;
9747 tcode2 = EQ_EXPR;
9748 goto unordered_bcc;
9750 unordered_bcc:
9751 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9752 if (can_compare_p (rcode1, mode, ccp_jump))
9753 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9754 if_true_label);
9755 else
9757 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9758 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9759 tree cmp0, cmp1;
9761 /* If the target doesn't support combined unordered
9762 compares, decompose into UNORDERED + comparison. */
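/* E.g. (illustrative): UNLT (a, b) is rewritten here as
   UNORDERED (a, b) || LT (a, b), which do_jump can then expand using
   comparisons the target does support.  */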
9763 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9764 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9765 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9766 do_jump (exp, if_false_label, if_true_label);
9769 break;
9771 /* Special case:
9772 __builtin_expect (<test>, 0) and
9773 __builtin_expect (<test>, 1)
9775 We need to do this here, so that <test> is not converted to a SCC
9776 operation on machines that use condition code registers and COMPARE
9777 like the PowerPC, and then the jump is done based on whether the SCC
9778 operation produced a 1 or 0. */
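/* E.g. (illustrative): for `if (__builtin_expect (p != 0, 1)) ...' the
   CALL_EXPR case below hands the whole call to expand_builtin_expect_jump,
   so the branch is emitted directly from the test `p != 0' instead of
   first materializing a 0/1 value.  */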
9779 case CALL_EXPR:
9780 /* Check for a built-in function. */
9781 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9783 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9784 tree arglist = TREE_OPERAND (exp, 1);
9786 if (TREE_CODE (fndecl) == FUNCTION_DECL
9787 && DECL_BUILT_IN (fndecl)
9788 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9789 && arglist != NULL_TREE
9790 && TREE_CHAIN (arglist) != NULL_TREE)
9792 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9793 if_true_label);
9795 if (seq != NULL_RTX)
9797 emit_insn (seq);
9798 return;
9802 /* fall through and generate the normal code. */
9804 default:
9805 normal:
9806 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9807 #if 0
9808 /* This is not needed any more and causes poor code since it causes
9809 comparisons and tests from non-SI objects to have different code
9810 sequences. */
9811 /* Copy to register to avoid generating bad insns by cse
9812 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9813 if (!cse_not_expected && GET_CODE (temp) == MEM)
9814 temp = copy_to_reg (temp);
9815 #endif
9816 do_pending_stack_adjust ();
9817 /* Do any postincrements in the expression that was tested. */
9818 emit_queue ();
9820 if (GET_CODE (temp) == CONST_INT
9821 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9822 || GET_CODE (temp) == LABEL_REF)
9824 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9825 if (target)
9826 emit_jump (target);
9828 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9829 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9830 /* Note swapping the labels gives us not-equal. */
9831 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9832 else if (GET_MODE (temp) != VOIDmode)
9833 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9834 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9835 GET_MODE (temp), NULL_RTX,
9836 if_false_label, if_true_label);
9837 else
9838 abort ();
9841 if (drop_through_label)
9843 /* If do_jump produces code that might be jumped around,
9844 do any stack adjusts from that code, before the place
9845 where control merges in. */
9846 do_pending_stack_adjust ();
9847 emit_label (drop_through_label);
9851 /* Given a comparison expression EXP for values too wide to be compared
9852 with one insn, test the comparison and jump to the appropriate label.
9853 The code of EXP is ignored; we always test GT if SWAP is 0,
9854 and LT if SWAP is 1. */
9856 static void
9857 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9858 tree exp;
9859 int swap;
9860 rtx if_false_label, if_true_label;
9862 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9863 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9864 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9865 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9867 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9870 /* Compare OP0 with OP1, word at a time, in mode MODE.
9871 UNSIGNEDP says to do unsigned comparison.
9872 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
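/* A sketch (illustrative only): for a DImode comparison on a 32-bit target
   NWORDS is 2; the loop below first compares the high-order words using
   the caller's signedness, jumping to IF_TRUE_LABEL if OP0's word is
   greater and to IF_FALSE_LABEL if the words differ, and only when they
   are equal goes on to compare the low-order words, which are always
   compared unsigned.  */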
9874 void
9875 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9876 enum machine_mode mode;
9877 int unsignedp;
9878 rtx op0, op1;
9879 rtx if_false_label, if_true_label;
9881 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9882 rtx drop_through_label = 0;
9883 int i;
9885 if (! if_true_label || ! if_false_label)
9886 drop_through_label = gen_label_rtx ();
9887 if (! if_true_label)
9888 if_true_label = drop_through_label;
9889 if (! if_false_label)
9890 if_false_label = drop_through_label;
9892 /* Compare a word at a time, high order first. */
9893 for (i = 0; i < nwords; i++)
9895 rtx op0_word, op1_word;
9897 if (WORDS_BIG_ENDIAN)
9899 op0_word = operand_subword_force (op0, i, mode);
9900 op1_word = operand_subword_force (op1, i, mode);
9902 else
9904 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9905 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9908 /* All but the high-order word must be compared as unsigned. */
9909 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9910 (unsignedp || i > 0), word_mode, NULL_RTX,
9911 NULL_RTX, if_true_label);
9913 /* Consider lower words only if these are equal. */
9914 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9915 NULL_RTX, NULL_RTX, if_false_label);
9918 if (if_false_label)
9919 emit_jump (if_false_label);
9920 if (drop_through_label)
9921 emit_label (drop_through_label);
9924 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9925 with one insn, test the comparison and jump to the appropriate label. */
9927 static void
9928 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9929 tree exp;
9930 rtx if_false_label, if_true_label;
9932 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9933 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9934 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9935 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9936 int i;
9937 rtx drop_through_label = 0;
9939 if (! if_false_label)
9940 drop_through_label = if_false_label = gen_label_rtx ();
9942 for (i = 0; i < nwords; i++)
9943 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9944 operand_subword_force (op1, i, mode),
9945 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9946 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9948 if (if_true_label)
9949 emit_jump (if_true_label);
9950 if (drop_through_label)
9951 emit_label (drop_through_label);
9954 /* Jump according to whether OP0 is 0.
9955 We assume that OP0 has an integer mode that is too wide
9956 for the available compare insns. */
9958 void
9959 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9960 rtx op0;
9961 rtx if_false_label, if_true_label;
9963 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9964 rtx part;
9965 int i;
9966 rtx drop_through_label = 0;
9968 /* The fastest way of doing this comparison on almost any machine is to
9969 "or" all the words and compare the result. If all have to be loaded
9970 from memory and this is a very wide item, it may be
9971 slower, but that's highly unlikely. */
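/* Sketch (illustrative): for a 128-bit OP0 on a 32-bit target, the loop
   below ORs the four 32-bit words into a single word_mode register and
   then does one compare of the result against zero, instead of four
   separate compare-and-branch sequences.  */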
9973 part = gen_reg_rtx (word_mode);
9974 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9975 for (i = 1; i < nwords && part != 0; i++)
9976 part = expand_binop (word_mode, ior_optab, part,
9977 operand_subword_force (op0, i, GET_MODE (op0)),
9978 part, 1, OPTAB_WIDEN);
9980 if (part != 0)
9982 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9983 NULL_RTX, if_false_label, if_true_label);
9985 return;
9988 /* If we couldn't do the "or" simply, do this with a series of compares. */
9989 if (! if_false_label)
9990 drop_through_label = if_false_label = gen_label_rtx ();
9992 for (i = 0; i < nwords; i++)
9993 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9994 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9995 if_false_label, NULL_RTX);
9997 if (if_true_label)
9998 emit_jump (if_true_label);
10000 if (drop_through_label)
10001 emit_label (drop_through_label);
10004 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10005 (including code to compute the values to be compared)
10006 and set (CC0) according to the result.
10007 The decision as to signed or unsigned comparison must be made by the caller.
10009 We force a stack adjustment unless there are currently
10010 things pushed on the stack that aren't yet used.
10012 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10013 compared. */
10015 rtx
10016 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10017 rtx op0, op1;
10018 enum rtx_code code;
10019 int unsignedp;
10020 enum machine_mode mode;
10021 rtx size;
10023 rtx tem;
10025 /* If one operand is constant, make it the second one. Only do this
10026 if the other operand is not constant as well. */
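/* E.g. (illustrative): a request to compare `3 < x' is rewritten below as
   `x > 3'; swap_condition turns LT into GT, so later code need only handle
   a constant in the second operand.  */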
10028 if (swap_commutative_operands_p (op0, op1))
10030 tem = op0;
10031 op0 = op1;
10032 op1 = tem;
10033 code = swap_condition (code);
10036 if (flag_force_mem)
10038 op0 = force_not_mem (op0);
10039 op1 = force_not_mem (op1);
10042 do_pending_stack_adjust ();
10044 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10045 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10046 return tem;
10048 #if 0
10049 /* There's no need to do this now that combine.c can eliminate lots of
10050 sign extensions. This can be less efficient in certain cases on other
10051 machines. */
10053 /* If this is a signed equality comparison, we can do it as an
10054 unsigned comparison since zero-extension is cheaper than sign
10055 extension and comparisons with zero are done as unsigned. This is
10056 the case even on machines that can do fast sign extension, since
10057 zero-extension is easier to combine with other operations than
10058 sign-extension is. If we are comparing against a constant, we must
10059 convert it to what it would look like unsigned. */
10060 if ((code == EQ || code == NE) && ! unsignedp
10061 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10063 if (GET_CODE (op1) == CONST_INT
10064 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10065 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10066 unsignedp = 1;
10068 #endif
10070 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10072 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10075 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10076 The decision as to signed or unsigned comparison must be made by the caller.
10078 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10079 compared. */
10081 void
10082 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10083 if_false_label, if_true_label)
10084 rtx op0, op1;
10085 enum rtx_code code;
10086 int unsignedp;
10087 enum machine_mode mode;
10088 rtx size;
10089 rtx if_false_label, if_true_label;
10091 rtx tem;
10092 int dummy_true_label = 0;
10094 /* Reverse the comparison if that is safe and we want to jump if it is
10095 false. */
10096 if (! if_true_label && ! FLOAT_MODE_P (mode))
10098 if_true_label = if_false_label;
10099 if_false_label = 0;
10100 code = reverse_condition (code);
10103 /* If one operand is constant, make it the second one. Only do this
10104 if the other operand is not constant as well. */
10106 if (swap_commutative_operands_p (op0, op1))
10108 tem = op0;
10109 op0 = op1;
10110 op1 = tem;
10111 code = swap_condition (code);
10114 if (flag_force_mem)
10116 op0 = force_not_mem (op0);
10117 op1 = force_not_mem (op1);
10120 do_pending_stack_adjust ();
10122 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10123 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10125 if (tem == const_true_rtx)
10127 if (if_true_label)
10128 emit_jump (if_true_label);
10130 else
10132 if (if_false_label)
10133 emit_jump (if_false_label);
10135 return;
10138 #if 0
10139 /* There's no need to do this now that combine.c can eliminate lots of
10140 sign extensions. This can be less efficient in certain cases on other
10141 machines. */
10143 /* If this is a signed equality comparison, we can do it as an
10144 unsigned comparison since zero-extension is cheaper than sign
10145 extension and comparisons with zero are done as unsigned. This is
10146 the case even on machines that can do fast sign extension, since
10147 zero-extension is easier to combine with other operations than
10148 sign-extension is. If we are comparing against a constant, we must
10149 convert it to what it would look like unsigned. */
10150 if ((code == EQ || code == NE) && ! unsignedp
10151 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10153 if (GET_CODE (op1) == CONST_INT
10154 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10155 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10156 unsignedp = 1;
10158 #endif
10160 if (! if_true_label)
10162 dummy_true_label = 1;
10163 if_true_label = gen_label_rtx ();
10166 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10167 if_true_label);
10169 if (if_false_label)
10170 emit_jump (if_false_label);
10171 if (dummy_true_label)
10172 emit_label (if_true_label);
10175 /* Generate code for a comparison expression EXP (including code to compute
10176 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10177 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10178 generated code will drop through.
10179 SIGNED_CODE should be the rtx operation for this comparison for
10180 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10182 We force a stack adjustment unless there are currently
10183 things pushed on the stack that aren't yet used. */
10185 static void
10186 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10187 if_true_label)
10188 tree exp;
10189 enum rtx_code signed_code, unsigned_code;
10190 rtx if_false_label, if_true_label;
10192 rtx op0, op1;
10193 tree type;
10194 enum machine_mode mode;
10195 int unsignedp;
10196 enum rtx_code code;
10198 /* Don't crash if the comparison was erroneous. */
10199 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10200 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10201 return;
10203 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10204 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10205 return;
10207 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10208 mode = TYPE_MODE (type);
10209 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10210 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10211 || (GET_MODE_BITSIZE (mode)
10212 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10213 1)))))))
10215 /* op0 might have been replaced by a promoted constant, in which
10216 case the type of the second argument should be used. */
10217 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10218 mode = TYPE_MODE (type);
10220 unsignedp = TREE_UNSIGNED (type);
10221 code = unsignedp ? unsigned_code : signed_code;
10223 #ifdef HAVE_canonicalize_funcptr_for_compare
10224 /* If function pointers need to be "canonicalized" before they can
10225 be reliably compared, then canonicalize them. */
10226 if (HAVE_canonicalize_funcptr_for_compare
10227 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10228 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10229 == FUNCTION_TYPE))
10231 rtx new_op0 = gen_reg_rtx (mode);
10233 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10234 op0 = new_op0;
10237 if (HAVE_canonicalize_funcptr_for_compare
10238 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10239 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10240 == FUNCTION_TYPE))
10242 rtx new_op1 = gen_reg_rtx (mode);
10244 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10245 op1 = new_op1;
10247 #endif
10249 /* Do any postincrements in the expression that was tested. */
10250 emit_queue ();
10252 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10253 ((mode == BLKmode)
10254 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10255 if_false_label, if_true_label);
10258 /* Generate code to calculate EXP using a store-flag instruction
10259 and return an rtx for the result. EXP is either a comparison
10260 or a TRUTH_NOT_EXPR whose operand is a comparison.
10262 If TARGET is nonzero, store the result there if convenient.
10264 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10265 cheap.
10267 Return zero if there is no suitable set-flag instruction
10268 available on this machine.
10270 Once expand_expr has been called on the arguments of the comparison,
10271 we are committed to doing the store flag, since it is not safe to
10272 re-evaluate the expression. We emit the store-flag insn by calling
10273 emit_store_flag, but only expand the arguments if we have a reason
10274 to believe that emit_store_flag will be successful. If we think that
10275 it will, but it isn't, we have to simulate the store-flag with a
10276 set/jump/set sequence. */
10278 static rtx
10279 do_store_flag (exp, target, mode, only_cheap)
10280 tree exp;
10281 rtx target;
10282 enum machine_mode mode;
10283 int only_cheap;
10285 enum rtx_code code;
10286 tree arg0, arg1, type;
10287 tree tem;
10288 enum machine_mode operand_mode;
10289 int invert = 0;
10290 int unsignedp;
10291 rtx op0, op1;
10292 enum insn_code icode;
10293 rtx subtarget = target;
10294 rtx result, label;
10296 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10297 result at the end. We can't simply invert the test since it would
10298 have already been inverted if it were valid. This case occurs for
10299 some floating-point comparisons. */
10301 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10302 invert = 1, exp = TREE_OPERAND (exp, 0);
10304 arg0 = TREE_OPERAND (exp, 0);
10305 arg1 = TREE_OPERAND (exp, 1);
10307 /* Don't crash if the comparison was erroneous. */
10308 if (arg0 == error_mark_node || arg1 == error_mark_node)
10309 return const0_rtx;
10311 type = TREE_TYPE (arg0);
10312 operand_mode = TYPE_MODE (type);
10313 unsignedp = TREE_UNSIGNED (type);
10315 /* We won't bother with BLKmode store-flag operations because it would mean
10316 passing a lot of information to emit_store_flag. */
10317 if (operand_mode == BLKmode)
10318 return 0;
10320 /* We won't bother with store-flag operations involving function pointers
10321 when function pointers must be canonicalized before comparisons. */
10322 #ifdef HAVE_canonicalize_funcptr_for_compare
10323 if (HAVE_canonicalize_funcptr_for_compare
10324 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10325 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10326 == FUNCTION_TYPE))
10327 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10328 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10329 == FUNCTION_TYPE))))
10330 return 0;
10331 #endif
10333 STRIP_NOPS (arg0);
10334 STRIP_NOPS (arg1);
10336 /* Get the rtx comparison code to use. We know that EXP is a comparison
10337 operation of some type. Some comparisons against 1 and -1 can be
10338 converted to comparisons with zero. Do so here so that the tests
10339 below will be aware that we have a comparison with zero. These
10340 tests will not catch constants in the first operand, but constants
10341 are rarely passed as the first operand. */
10343 switch (TREE_CODE (exp))
10345 case EQ_EXPR:
10346 code = EQ;
10347 break;
10348 case NE_EXPR:
10349 code = NE;
10350 break;
10351 case LT_EXPR:
10352 if (integer_onep (arg1))
10353 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10354 else
10355 code = unsignedp ? LTU : LT;
10356 break;
10357 case LE_EXPR:
10358 if (! unsignedp && integer_all_onesp (arg1))
10359 arg1 = integer_zero_node, code = LT;
10360 else
10361 code = unsignedp ? LEU : LE;
10362 break;
10363 case GT_EXPR:
10364 if (! unsignedp && integer_all_onesp (arg1))
10365 arg1 = integer_zero_node, code = GE;
10366 else
10367 code = unsignedp ? GTU : GT;
10368 break;
10369 case GE_EXPR:
10370 if (integer_onep (arg1))
10371 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10372 else
10373 code = unsignedp ? GEU : GE;
10374 break;
10376 case UNORDERED_EXPR:
10377 code = UNORDERED;
10378 break;
10379 case ORDERED_EXPR:
10380 code = ORDERED;
10381 break;
10382 case UNLT_EXPR:
10383 code = UNLT;
10384 break;
10385 case UNLE_EXPR:
10386 code = UNLE;
10387 break;
10388 case UNGT_EXPR:
10389 code = UNGT;
10390 break;
10391 case UNGE_EXPR:
10392 code = UNGE;
10393 break;
10394 case UNEQ_EXPR:
10395 code = UNEQ;
10396 break;
10398 default:
10399 abort ();
10402 /* Put a constant second. */
10403 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10405 tem = arg0; arg0 = arg1; arg1 = tem;
10406 code = swap_condition (code);
10409 /* If this is an equality or inequality test of a single bit, we can
10410 do this by shifting the bit being tested to the low-order bit and
10411 masking the result with the constant 1. If the condition was EQ,
10412 we xor it with 1. This does not require an scc insn and is faster
10413 than an scc insn even if we have it. */
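/* For instance (illustrative only): `(x & 8) != 0' becomes `(x >> 3) & 1',
   and `(x & 8) == 0' becomes `((x >> 3) ^ 1) & 1', with the AND omitted
   entirely when the bit being tested is the highest-order bit of the
   type.  */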
10415 if ((code == NE || code == EQ)
10416 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10417 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10419 tree inner = TREE_OPERAND (arg0, 0);
10420 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10421 int ops_unsignedp;
10423 /* If INNER is a right shift of a constant and it plus BITNUM does
10424 not overflow, adjust BITNUM and INNER. */
10426 if (TREE_CODE (inner) == RSHIFT_EXPR
10427 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10428 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10429 && bitnum < TYPE_PRECISION (type)
10430 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10431 bitnum - TYPE_PRECISION (type)))
10433 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10434 inner = TREE_OPERAND (inner, 0);
10437 /* If we are going to be able to omit the AND below, we must do our
10438 operations as unsigned. If we must use the AND, we have a choice.
10439 Normally unsigned is faster, but for some machines signed is. */
10440 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10441 #ifdef LOAD_EXTEND_OP
10442 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10443 #else
10444 : 1
10445 #endif
10446 );
10448 if (! get_subtarget (subtarget)
10449 || GET_MODE (subtarget) != operand_mode
10450 || ! safe_from_p (subtarget, inner, 1))
10451 subtarget = 0;
10453 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10455 if (bitnum != 0)
10456 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10457 size_int (bitnum), subtarget, ops_unsignedp);
10459 if (GET_MODE (op0) != mode)
10460 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10462 if ((code == EQ && ! invert) || (code == NE && invert))
10463 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10464 ops_unsignedp, OPTAB_LIB_WIDEN);
10466 /* Put the AND last so it can combine with more things. */
10467 if (bitnum != TYPE_PRECISION (type) - 1)
10468 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10470 return op0;
10473 /* Now see if we are likely to be able to do this. Return if not. */
10474 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10475 return 0;
10477 icode = setcc_gen_code[(int) code];
10478 if (icode == CODE_FOR_nothing
10479 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10481 /* We can only do this if it is one of the special cases that
10482 can be handled without an scc insn. */
10483 if ((code == LT && integer_zerop (arg1))
10484 || (! only_cheap && code == GE && integer_zerop (arg1)))
10486 else if (BRANCH_COST >= 0
10487 && ! only_cheap && (code == NE || code == EQ)
10488 && TREE_CODE (type) != REAL_TYPE
10489 && ((abs_optab->handlers[(int) operand_mode].insn_code
10490 != CODE_FOR_nothing)
10491 || (ffs_optab->handlers[(int) operand_mode].insn_code
10492 != CODE_FOR_nothing)))
10494 else
10495 return 0;
10498 if (! get_subtarget (target)
10499 || GET_MODE (subtarget) != operand_mode
10500 || ! safe_from_p (subtarget, arg1, 1))
10501 subtarget = 0;
10503 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10504 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10506 if (target == 0)
10507 target = gen_reg_rtx (mode);
10509 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10510 because, if emit_store_flag does anything, it will succeed and
10511 OP0 and OP1 will not be used subsequently. */
10513 result = emit_store_flag (target, code,
10514 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10515 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10516 operand_mode, unsignedp, 1);
10518 if (result)
10520 if (invert)
10521 result = expand_binop (mode, xor_optab, result, const1_rtx,
10522 result, 0, OPTAB_LIB_WIDEN);
10523 return result;
10526 /* If this failed, we have to do this with set/compare/jump/set code. */
10527 if (GET_CODE (target) != REG
10528 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10529 target = gen_reg_rtx (GET_MODE (target));
10531 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10532 result = compare_from_rtx (op0, op1, code, unsignedp,
10533 operand_mode, NULL_RTX);
10534 if (GET_CODE (result) == CONST_INT)
10535 return (((result == const0_rtx && ! invert)
10536 || (result != const0_rtx && invert))
10537 ? const0_rtx : const1_rtx);
10539 /* The code of RESULT may not match CODE if compare_from_rtx
10540 decided to swap its operands and reverse the original code.
10542 We know that compare_from_rtx returns either a CONST_INT or
10543 a new comparison code, so it is safe to just extract the
10544 code from RESULT. */
10545 code = GET_CODE (result);
10547 label = gen_label_rtx ();
10548 if (bcc_gen_fctn[(int) code] == 0)
10549 abort ();
10551 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10552 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10553 emit_label (label);
10555 return target;
10559 /* Stubs in case we haven't got a casesi insn. */
10560 #ifndef HAVE_casesi
10561 # define HAVE_casesi 0
10562 # define gen_casesi(a, b, c, d, e) (0)
10563 # define CODE_FOR_casesi CODE_FOR_nothing
10564 #endif
10566 /* If the machine does not have a case insn that compares the bounds,
10567 this means extra overhead for dispatch tables, which raises the
10568 threshold for using them. */
10569 #ifndef CASE_VALUES_THRESHOLD
10570 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10571 #endif /* CASE_VALUES_THRESHOLD */
10573 unsigned int
10574 case_values_threshold ()
10576 return CASE_VALUES_THRESHOLD;
10579 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10580 0 otherwise (i.e. if there is no casesi instruction). */
10581 int
10582 try_casesi (index_type, index_expr, minval, range,
10583 table_label, default_label)
10584 tree index_type, index_expr, minval, range;
10585 rtx table_label ATTRIBUTE_UNUSED;
10586 rtx default_label;
10588 enum machine_mode index_mode = SImode;
10589 int index_bits = GET_MODE_BITSIZE (index_mode);
10590 rtx op1, op2, index;
10591 enum machine_mode op_mode;
10593 if (! HAVE_casesi)
10594 return 0;
10596 /* Convert the index to SImode. */
10597 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10599 enum machine_mode omode = TYPE_MODE (index_type);
10600 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10602 /* We must handle the endpoints in the original mode. */
10603 index_expr = build (MINUS_EXPR, index_type,
10604 index_expr, minval);
10605 minval = integer_zero_node;
10606 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10607 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10608 omode, 1, default_label);
10609 /* Now we can safely truncate. */
10610 index = convert_to_mode (index_mode, index, 0);
10612 else
10614 if (TYPE_MODE (index_type) != index_mode)
10616 index_expr = convert ((*lang_hooks.types.type_for_size)
10617 (index_bits, 0), index_expr);
10618 index_type = TREE_TYPE (index_expr);
10621 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10623 emit_queue ();
10624 index = protect_from_queue (index, 0);
10625 do_pending_stack_adjust ();
10627 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10628 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10629 (index, op_mode))
10630 index = copy_to_mode_reg (op_mode, index);
10632 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10634 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10635 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10636 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10637 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10638 (op1, op_mode))
10639 op1 = copy_to_mode_reg (op_mode, op1);
10641 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10643 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10644 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10645 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10646 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10647 (op2, op_mode))
10648 op2 = copy_to_mode_reg (op_mode, op2);
10650 emit_jump_insn (gen_casesi (index, op1, op2,
10651 table_label, default_label));
10652 return 1;
10655 /* Attempt to generate a tablejump instruction; same concept. */
10656 #ifndef HAVE_tablejump
10657 #define HAVE_tablejump 0
10658 #define gen_tablejump(x, y) (0)
10659 #endif
10661 /* Subroutine of the next function.
10663 INDEX is the value being switched on, with the lowest value
10664 in the table already subtracted.
10665 MODE is its expected mode (needed if INDEX is constant).
10666 RANGE is the length of the jump table.
10667 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10669 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10670 index value is out of range. */
10672 static void
10673 do_tablejump (index, mode, range, table_label, default_label)
10674 rtx index, range, table_label, default_label;
10675 enum machine_mode mode;
10677 rtx temp, vector;
10679 /* Do an unsigned comparison (in the proper mode) between the index
10680 expression and the value which represents the length of the range.
10681 Since we just finished subtracting the lower bound of the range
10682 from the index expression, this comparison allows us to simultaneously
10683 check that the original index expression value is both greater than
10684 or equal to the minimum value of the range and less than or equal to
10685 the maximum value of the range. */
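/* E.g. (illustrative): for a switch whose case values run from 5 to 10,
   INDEX arrives here with 5 already subtracted; the single unsigned GTU
   test below sends an original index of 3 (now wrapped around to a huge
   unsigned value) to DEFAULT_LABEL just as it does one above 10, so one
   compare replaces two.  */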
10687 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10688 default_label);
10690 /* If index is in range, it must fit in Pmode.
10691 Convert to Pmode so we can index with it. */
10692 if (mode != Pmode)
10693 index = convert_to_mode (Pmode, index, 1);
10695 /* Don't let a MEM slip through, because then INDEX that comes
10696 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10697 and break_out_memory_refs will go to work on it and mess it up. */
10698 #ifdef PIC_CASE_VECTOR_ADDRESS
10699 if (flag_pic && GET_CODE (index) != REG)
10700 index = copy_to_mode_reg (Pmode, index);
10701 #endif
10703 /* If flag_force_addr were to affect this address
10704 it could interfere with the tricky assumptions made
10705 about addresses that contain label-refs,
10706 which may be valid only very near the tablejump itself. */
10707 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10708 GET_MODE_SIZE, because this indicates how large insns are. The other
10709 uses should all be Pmode, because they are addresses. This code
10710 could fail if addresses and insns are not the same size. */
10711 index = gen_rtx_PLUS (Pmode,
10712 gen_rtx_MULT (Pmode, index,
10713 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10714 gen_rtx_LABEL_REF (Pmode, table_label));
10715 #ifdef PIC_CASE_VECTOR_ADDRESS
10716 if (flag_pic)
10717 index = PIC_CASE_VECTOR_ADDRESS (index);
10718 else
10719 #endif
10720 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10721 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10722 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10723 RTX_UNCHANGING_P (vector) = 1;
10724 convert_move (temp, vector, 0);
10726 emit_jump_insn (gen_tablejump (temp, table_label));
10728 /* If we are generating PIC code or if the table is PC-relative, the
10729 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10730 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10731 emit_barrier ();
10734 int
10735 try_tablejump (index_type, index_expr, minval, range,
10736 table_label, default_label)
10737 tree index_type, index_expr, minval, range;
10738 rtx table_label, default_label;
10740 rtx index;
10742 if (! HAVE_tablejump)
10743 return 0;
10745 index_expr = fold (build (MINUS_EXPR, index_type,
10746 convert (index_type, index_expr),
10747 convert (index_type, minval)));
10748 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10749 emit_queue ();
10750 index = protect_from_queue (index, 0);
10751 do_pending_stack_adjust ();
10753 do_tablejump (index, TYPE_MODE (index_type),
10754 convert_modes (TYPE_MODE (index_type),
10755 TYPE_MODE (TREE_TYPE (range)),
10756 expand_expr (range, NULL_RTX,
10757 VOIDmode, 0),
10758 TREE_UNSIGNED (TREE_TYPE (range))),
10759 table_label, default_label);
10760 return 1;