1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "real.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "ggc.h"
46 #include "langhooks.h"
47 #include "intl.h"
48 #include "tm_p.h"
50 /* Decide whether a function's arguments should be processed
51 from first to last or from last to first.
53 They should if the stack and args grow in opposite directions, but
54 only if we have push insns. */
56 #ifdef PUSH_ROUNDING
58 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
59 #define PUSH_ARGS_REVERSED /* If it's last to first. */
60 #endif
62 #endif
64 #ifndef STACK_PUSH_CODE
65 #ifdef STACK_GROWS_DOWNWARD
66 #define STACK_PUSH_CODE PRE_DEC
67 #else
68 #define STACK_PUSH_CODE PRE_INC
69 #endif
70 #endif
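/* For illustration: on a STACK_GROWS_DOWNWARD target a single push is
   expanded (roughly) as (set (mem:M (pre_dec (reg sp))) (src)), i.e. the
   stack pointer is decremented and the operand stored in one insn.  */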
72 /* Assume that case vectors are not pc-relative. */
73 #ifndef CASE_VECTOR_PC_RELATIVE
74 #define CASE_VECTOR_PC_RELATIVE 0
75 #endif
77 /* If this is nonzero, we do not bother generating VOLATILE
78 around volatile memory references, and we are willing to
79 output indirect addresses. If cse is to follow, we reject
80 indirect addresses so a useful potential cse is generated;
81 if it is used only once, instruction combination will produce
82 the same indirect address eventually. */
83 int cse_not_expected;
85 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
86 static tree placeholder_list = 0;
88 /* This structure is used by move_by_pieces to describe the move to
89 be performed. */
90 struct move_by_pieces
92 rtx to;
93 rtx to_addr;
94 int autinc_to;
95 int explicit_inc_to;
96 rtx from;
97 rtx from_addr;
98 int autinc_from;
99 int explicit_inc_from;
100 unsigned HOST_WIDE_INT len;
101 HOST_WIDE_INT offset;
102 int reverse;
105 /* This structure is used by store_by_pieces to describe the clear to
106 be performed. */
108 struct store_by_pieces
110 rtx to;
111 rtx to_addr;
112 int autinc_to;
113 int explicit_inc_to;
114 unsigned HOST_WIDE_INT len;
115 HOST_WIDE_INT offset;
116 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
117 PTR constfundata;
118 int reverse;
121 extern struct obstack permanent_obstack;
123 static rtx enqueue_insn PARAMS ((rtx, rtx));
124 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
125 PARAMS ((unsigned HOST_WIDE_INT,
126 unsigned int));
127 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
128 struct move_by_pieces *));
129 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
130 enum machine_mode));
131 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
132 unsigned int));
133 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
134 unsigned int));
135 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
136 enum machine_mode,
137 struct store_by_pieces *));
138 static rtx compress_float_constant PARAMS ((rtx, rtx));
139 static rtx get_subtarget PARAMS ((rtx));
140 static int is_zeros_p PARAMS ((tree));
141 static int mostly_zeros_p PARAMS ((tree));
142 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int));
145 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
146 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
147 HOST_WIDE_INT, enum machine_mode,
148 tree, enum machine_mode, int, tree,
149 int));
150 static rtx var_rtx PARAMS ((tree));
151 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
152 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
153 static int is_aligning_offset PARAMS ((tree, tree));
154 static rtx expand_increment PARAMS ((tree, int, int));
155 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
156 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
157 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
158 rtx, rtx));
159 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
160 #ifdef PUSH_ROUNDING
161 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
162 #endif
163 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
165 /* Record for each mode whether we can move a register directly to or
166 from an object of that mode in memory. If we can't, we won't try
167 to use that mode directly when accessing a field of that mode. */
169 static char direct_load[NUM_MACHINE_MODES];
170 static char direct_store[NUM_MACHINE_MODES];
172 /* Record for each mode whether we can float-extend from memory. */
174 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
176 /* If a memory-to-memory move would take MOVE_RATIO or more simple
177 move-instruction sequences, we will do a movstr or libcall instead. */
179 #ifndef MOVE_RATIO
180 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
181 #define MOVE_RATIO 2
182 #else
183 /* If we are optimizing for space (-Os), cut down the default move ratio. */
184 #define MOVE_RATIO (optimize_size ? 3 : 15)
185 #endif
186 #endif
188 /* This macro is used to determine whether move_by_pieces should be called
189 to perform a structure copy. */
190 #ifndef MOVE_BY_PIECES_P
191 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
192 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
193 #endif
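/* Illustrative example (assuming a 32-bit target with MOVE_MAX of 4 and
   the default MOVE_RATIO of 15): copying a 16-byte, word-aligned struct
   takes four SImode moves, so move_by_pieces_ninsns returns 4 < 15 and
   MOVE_BY_PIECES_P is true; the copy is expanded inline rather than
   through a movstr pattern or a memcpy call.  */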
195 /* This array records the insn_code of insns to perform block moves. */
196 enum insn_code movstr_optab[NUM_MACHINE_MODES];
198 /* This array records the insn_code of insns to perform block clears. */
199 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
201 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
203 #ifndef SLOW_UNALIGNED_ACCESS
204 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
205 #endif
207 /* This is run once per compilation to set up which modes can be used
208 directly in memory and to initialize the block move optab. */
210 void
211 init_expr_once ()
213 rtx insn, pat;
214 enum machine_mode mode;
215 int num_clobbers;
216 rtx mem, mem1;
218 /* Try indexing by frame ptr and try by stack ptr.
219 It is known that on the Convex the stack ptr isn't a valid index.
220 With luck, one or the other is valid on any machine. */
221 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
222 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
224 insn = rtx_alloc (INSN);
225 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
226 PATTERN (insn) = pat;
228 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
229 mode = (enum machine_mode) ((int) mode + 1))
231 int regno;
232 rtx reg;
234 direct_load[(int) mode] = direct_store[(int) mode] = 0;
235 PUT_MODE (mem, mode);
236 PUT_MODE (mem1, mode);
238 /* See if there is some register that can be used in this mode and
239 directly loaded or stored from memory. */
241 if (mode != VOIDmode && mode != BLKmode)
242 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
243 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
244 regno++)
246 if (! HARD_REGNO_MODE_OK (regno, mode))
247 continue;
249 reg = gen_rtx_REG (mode, regno);
251 SET_SRC (pat) = mem;
252 SET_DEST (pat) = reg;
253 if (recog (pat, insn, &num_clobbers) >= 0)
254 direct_load[(int) mode] = 1;
256 SET_SRC (pat) = mem1;
257 SET_DEST (pat) = reg;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_load[(int) mode] = 1;
261 SET_SRC (pat) = reg;
262 SET_DEST (pat) = mem;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_store[(int) mode] = 1;
266 SET_SRC (pat) = reg;
267 SET_DEST (pat) = mem1;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_store[(int) mode] = 1;
273 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
275 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
276 mode = GET_MODE_WIDER_MODE (mode))
278 enum machine_mode srcmode;
279 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
280 srcmode = GET_MODE_WIDER_MODE (srcmode))
282 enum insn_code ic;
284 ic = can_extend_p (mode, srcmode, 0);
285 if (ic == CODE_FOR_nothing)
286 continue;
288 PUT_MODE (mem, srcmode);
290 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
291 float_extend_from_mem[mode][srcmode] = true;
296 /* This is run at the start of compiling a function. */
298 void
299 init_expr ()
301 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
303 pending_chain = 0;
304 pending_stack_adjust = 0;
305 stack_pointer_delta = 0;
306 inhibit_defer_pop = 0;
307 saveregs_value = 0;
308 apply_args_value = 0;
309 forced_labels = 0;
312 /* Small sanity check that the queue is empty at the end of a function. */
314 void
315 finish_expr_for_function ()
317 if (pending_chain)
318 abort ();
321 /* Manage the queue of increment instructions to be output
322 for POSTINCREMENT_EXPR expressions, etc. */
324 /* Queue up to increment (or change) VAR later. BODY says how:
325 BODY should be the same thing you would pass to emit_insn
326 to increment right away. It will go to emit_insn later on.
328 The value is a QUEUED expression to be used in place of VAR
329 where you want to guarantee the pre-incrementation value of VAR. */
331 static rtx
332 enqueue_insn (var, body)
333 rtx var, body;
335 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
336 body, pending_chain);
337 return pending_chain;
340 /* Use protect_from_queue to convert a QUEUED expression
341 into something that you can put immediately into an instruction.
342 If the queued incrementation has not happened yet,
343 protect_from_queue returns the variable itself.
344 If the incrementation has happened, protect_from_queue returns a temp
345 that contains a copy of the old value of the variable.
347 Any time an rtx which might possibly be a QUEUED is to be put
348 into an instruction, it must be passed through protect_from_queue first.
349 QUEUED expressions are not meaningful in instructions.
351 Do not pass a value through protect_from_queue and then hold
352 on to it for a while before putting it in an instruction!
353 If the queue is flushed in between, incorrect code will result. */
 355 rtx
 356 protect_from_queue (x, modify)
357 rtx x;
358 int modify;
360 RTX_CODE code = GET_CODE (x);
362 #if 0 /* A QUEUED can hang around after the queue is forced out. */
363 /* Shortcut for most common case. */
364 if (pending_chain == 0)
365 return x;
366 #endif
368 if (code != QUEUED)
370 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
371 use of autoincrement. Make a copy of the contents of the memory
372 location rather than a copy of the address, but not if the value is
373 of mode BLKmode. Don't modify X in place since it might be
374 shared. */
375 if (code == MEM && GET_MODE (x) != BLKmode
376 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
378 rtx y = XEXP (x, 0);
379 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
381 if (QUEUED_INSN (y))
383 rtx temp = gen_reg_rtx (GET_MODE (x));
385 emit_insn_before (gen_move_insn (temp, new),
386 QUEUED_INSN (y));
387 return temp;
390 /* Copy the address into a pseudo, so that the returned value
391 remains correct across calls to emit_queue. */
392 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
395 /* Otherwise, recursively protect the subexpressions of all
396 the kinds of rtx's that can contain a QUEUED. */
397 if (code == MEM)
399 rtx tem = protect_from_queue (XEXP (x, 0), 0);
400 if (tem != XEXP (x, 0))
402 x = copy_rtx (x);
403 XEXP (x, 0) = tem;
406 else if (code == PLUS || code == MULT)
408 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
409 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
410 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
412 x = copy_rtx (x);
413 XEXP (x, 0) = new0;
414 XEXP (x, 1) = new1;
417 return x;
419 /* If the increment has not happened, use the variable itself. Copy it
420 into a new pseudo so that the value remains correct across calls to
421 emit_queue. */
422 if (QUEUED_INSN (x) == 0)
423 return copy_to_reg (QUEUED_VAR (x));
424 /* If the increment has happened and a pre-increment copy exists,
425 use that copy. */
426 if (QUEUED_COPY (x) != 0)
427 return QUEUED_COPY (x);
428 /* The increment has happened but we haven't set up a pre-increment copy.
429 Set one up now, and use it. */
430 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
431 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
432 QUEUED_INSN (x));
433 return QUEUED_COPY (x);
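/* Usage sketch (illustrative, not from this file): when expanding
   something like "a[i++] = b", the expander passes each operand rtx
   through protect_from_queue before building the store, and calls
   emit_queue afterwards so the queued increment of "i" is emitted at
   the right point.  */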
436 /* Return nonzero if X contains a QUEUED expression:
437 if it contains anything that will be altered by a queued increment.
438 We handle only combinations of MEM, PLUS, MINUS and MULT operators
439 since memory addresses generally contain only those. */
 441 int
 442 queued_subexp_p (x)
443 rtx x;
445 enum rtx_code code = GET_CODE (x);
446 switch (code)
448 case QUEUED:
449 return 1;
450 case MEM:
451 return queued_subexp_p (XEXP (x, 0));
452 case MULT:
453 case PLUS:
454 case MINUS:
455 return (queued_subexp_p (XEXP (x, 0))
456 || queued_subexp_p (XEXP (x, 1)));
457 default:
458 return 0;
462 /* Perform all the pending incrementations. */
464 void
465 emit_queue ()
467 rtx p;
468 while ((p = pending_chain))
470 rtx body = QUEUED_BODY (p);
472 if (GET_CODE (body) == SEQUENCE)
474 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
475 emit_insn (QUEUED_BODY (p));
477 else
478 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
479 pending_chain = QUEUED_NEXT (p);
483 /* Copy data from FROM to TO, where the machine modes are not the same.
484 Both modes may be integer, or both may be floating.
485 UNSIGNEDP should be nonzero if FROM is an unsigned type.
486 This causes zero-extension instead of sign-extension. */
488 void
489 convert_move (to, from, unsignedp)
490 rtx to, from;
491 int unsignedp;
493 enum machine_mode to_mode = GET_MODE (to);
494 enum machine_mode from_mode = GET_MODE (from);
495 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
496 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
497 enum insn_code code;
498 rtx libcall;
500 /* rtx code for making an equivalent value. */
501 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
503 to = protect_from_queue (to, 1);
504 from = protect_from_queue (from, 0);
506 if (to_real != from_real)
507 abort ();
509 /* If FROM is a SUBREG that indicates that we have already done at least
510 the required extension, strip it. We don't handle such SUBREGs as
511 TO here. */
513 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
514 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
515 >= GET_MODE_SIZE (to_mode))
516 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
517 from = gen_lowpart (to_mode, from), from_mode = to_mode;
519 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
520 abort ();
522 if (to_mode == from_mode
523 || (from_mode == VOIDmode && CONSTANT_P (from)))
525 emit_move_insn (to, from);
526 return;
529 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
531 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
532 abort ();
534 if (VECTOR_MODE_P (to_mode))
535 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
536 else
537 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
539 emit_move_insn (to, from);
540 return;
543 if (to_real != from_real)
544 abort ();
546 if (to_real)
548 rtx value, insns;
550 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
552 /* Try converting directly if the insn is supported. */
553 if ((code = can_extend_p (to_mode, from_mode, 0))
554 != CODE_FOR_nothing)
556 emit_unop_insn (code, to, from, UNKNOWN);
557 return;
561 #ifdef HAVE_trunchfqf2
562 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
564 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
565 return;
567 #endif
568 #ifdef HAVE_trunctqfqf2
569 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
571 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
572 return;
574 #endif
575 #ifdef HAVE_truncsfqf2
576 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
578 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
579 return;
581 #endif
582 #ifdef HAVE_truncdfqf2
583 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
585 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
586 return;
588 #endif
589 #ifdef HAVE_truncxfqf2
590 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
592 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
593 return;
595 #endif
596 #ifdef HAVE_trunctfqf2
597 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
599 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
600 return;
602 #endif
604 #ifdef HAVE_trunctqfhf2
605 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
607 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
608 return;
610 #endif
611 #ifdef HAVE_truncsfhf2
612 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
614 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
615 return;
617 #endif
618 #ifdef HAVE_truncdfhf2
619 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
621 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
622 return;
624 #endif
625 #ifdef HAVE_truncxfhf2
626 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
628 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
629 return;
631 #endif
632 #ifdef HAVE_trunctfhf2
633 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
635 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
636 return;
638 #endif
640 #ifdef HAVE_truncsftqf2
641 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
643 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
644 return;
646 #endif
647 #ifdef HAVE_truncdftqf2
648 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
650 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
651 return;
653 #endif
654 #ifdef HAVE_truncxftqf2
655 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
657 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
658 return;
660 #endif
661 #ifdef HAVE_trunctftqf2
662 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
664 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
665 return;
667 #endif
669 #ifdef HAVE_truncdfsf2
670 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
672 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
673 return;
675 #endif
676 #ifdef HAVE_truncxfsf2
677 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
679 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
680 return;
682 #endif
683 #ifdef HAVE_trunctfsf2
684 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
686 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
687 return;
689 #endif
690 #ifdef HAVE_truncxfdf2
691 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
693 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
694 return;
696 #endif
697 #ifdef HAVE_trunctfdf2
698 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
700 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
701 return;
703 #endif
705 libcall = (rtx) 0;
706 switch (from_mode)
708 case SFmode:
709 switch (to_mode)
711 case DFmode:
712 libcall = extendsfdf2_libfunc;
713 break;
715 case XFmode:
716 libcall = extendsfxf2_libfunc;
717 break;
719 case TFmode:
720 libcall = extendsftf2_libfunc;
721 break;
723 default:
724 break;
726 break;
728 case DFmode:
729 switch (to_mode)
731 case SFmode:
732 libcall = truncdfsf2_libfunc;
733 break;
735 case XFmode:
736 libcall = extenddfxf2_libfunc;
737 break;
739 case TFmode:
740 libcall = extenddftf2_libfunc;
741 break;
743 default:
744 break;
746 break;
748 case XFmode:
749 switch (to_mode)
751 case SFmode:
752 libcall = truncxfsf2_libfunc;
753 break;
755 case DFmode:
756 libcall = truncxfdf2_libfunc;
757 break;
759 default:
760 break;
762 break;
764 case TFmode:
765 switch (to_mode)
767 case SFmode:
768 libcall = trunctfsf2_libfunc;
769 break;
771 case DFmode:
772 libcall = trunctfdf2_libfunc;
773 break;
775 default:
776 break;
778 break;
780 default:
781 break;
784 if (libcall == (rtx) 0)
785 /* This conversion is not implemented yet. */
786 abort ();
788 start_sequence ();
789 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
790 1, from, from_mode);
791 insns = get_insns ();
792 end_sequence ();
793 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
794 from));
795 return;
798 /* Now both modes are integers. */
800 /* Handle expanding beyond a word. */
801 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
802 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
804 rtx insns;
805 rtx lowpart;
806 rtx fill_value;
807 rtx lowfrom;
808 int i;
809 enum machine_mode lowpart_mode;
810 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
812 /* Try converting directly if the insn is supported. */
813 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
814 != CODE_FOR_nothing)
816 /* If FROM is a SUBREG, put it into a register. Do this
817 so that we always generate the same set of insns for
818 better cse'ing; if an intermediate assignment occurred,
819 we won't be doing the operation directly on the SUBREG. */
820 if (optimize > 0 && GET_CODE (from) == SUBREG)
821 from = force_reg (from_mode, from);
822 emit_unop_insn (code, to, from, equiv_code);
823 return;
825 /* Next, try converting via full word. */
826 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
827 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
828 != CODE_FOR_nothing))
830 if (GET_CODE (to) == REG)
831 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
832 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
833 emit_unop_insn (code, to,
834 gen_lowpart (word_mode, to), equiv_code);
835 return;
838 /* No special multiword conversion insn; do it by hand. */
839 start_sequence ();
841 /* Since we will turn this into a no conflict block, we must ensure
842 that the source does not overlap the target. */
844 if (reg_overlap_mentioned_p (to, from))
845 from = force_reg (from_mode, from);
847 /* Get a copy of FROM widened to a word, if necessary. */
848 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
849 lowpart_mode = word_mode;
850 else
851 lowpart_mode = from_mode;
853 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
855 lowpart = gen_lowpart (lowpart_mode, to);
856 emit_move_insn (lowpart, lowfrom);
858 /* Compute the value to put in each remaining word. */
859 if (unsignedp)
860 fill_value = const0_rtx;
861 else
863 #ifdef HAVE_slt
864 if (HAVE_slt
865 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
866 && STORE_FLAG_VALUE == -1)
868 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
869 lowpart_mode, 0);
870 fill_value = gen_reg_rtx (word_mode);
871 emit_insn (gen_slt (fill_value));
873 else
874 #endif
876 fill_value
877 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
878 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
879 NULL_RTX, 0);
880 fill_value = convert_to_mode (word_mode, fill_value, 1);
884 /* Fill the remaining words. */
885 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
887 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
888 rtx subword = operand_subword (to, index, 1, to_mode);
890 if (subword == 0)
891 abort ();
893 if (fill_value != subword)
894 emit_move_insn (subword, fill_value);
897 insns = get_insns ();
898 end_sequence ();
900 emit_no_conflict_block (insns, to, from, NULL_RTX,
901 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
902 return;
905 /* Truncating multi-word to a word or less. */
906 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
907 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
909 if (!((GET_CODE (from) == MEM
910 && ! MEM_VOLATILE_P (from)
911 && direct_load[(int) to_mode]
912 && ! mode_dependent_address_p (XEXP (from, 0)))
913 || GET_CODE (from) == REG
914 || GET_CODE (from) == SUBREG))
915 from = force_reg (from_mode, from);
916 convert_move (to, gen_lowpart (word_mode, from), 0);
917 return;
920 /* Handle pointer conversion. */ /* SPEE 900220. */
921 if (to_mode == PQImode)
923 if (from_mode != QImode)
924 from = convert_to_mode (QImode, from, unsignedp);
926 #ifdef HAVE_truncqipqi2
927 if (HAVE_truncqipqi2)
929 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
930 return;
932 #endif /* HAVE_truncqipqi2 */
933 abort ();
936 if (from_mode == PQImode)
938 if (to_mode != QImode)
940 from = convert_to_mode (QImode, from, unsignedp);
941 from_mode = QImode;
943 else
945 #ifdef HAVE_extendpqiqi2
946 if (HAVE_extendpqiqi2)
948 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
949 return;
951 #endif /* HAVE_extendpqiqi2 */
952 abort ();
956 if (to_mode == PSImode)
958 if (from_mode != SImode)
959 from = convert_to_mode (SImode, from, unsignedp);
961 #ifdef HAVE_truncsipsi2
962 if (HAVE_truncsipsi2)
964 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
965 return;
967 #endif /* HAVE_truncsipsi2 */
968 abort ();
971 if (from_mode == PSImode)
973 if (to_mode != SImode)
975 from = convert_to_mode (SImode, from, unsignedp);
976 from_mode = SImode;
978 else
980 #ifdef HAVE_extendpsisi2
981 if (! unsignedp && HAVE_extendpsisi2)
983 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
984 return;
986 #endif /* HAVE_extendpsisi2 */
987 #ifdef HAVE_zero_extendpsisi2
988 if (unsignedp && HAVE_zero_extendpsisi2)
990 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
991 return;
993 #endif /* HAVE_zero_extendpsisi2 */
994 abort ();
998 if (to_mode == PDImode)
1000 if (from_mode != DImode)
1001 from = convert_to_mode (DImode, from, unsignedp);
1003 #ifdef HAVE_truncdipdi2
1004 if (HAVE_truncdipdi2)
1006 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1007 return;
1009 #endif /* HAVE_truncdipdi2 */
1010 abort ();
1013 if (from_mode == PDImode)
1015 if (to_mode != DImode)
1017 from = convert_to_mode (DImode, from, unsignedp);
1018 from_mode = DImode;
1020 else
1022 #ifdef HAVE_extendpdidi2
1023 if (HAVE_extendpdidi2)
1025 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1026 return;
1028 #endif /* HAVE_extendpdidi2 */
1029 abort ();
1033 /* Now follow all the conversions between integers
1034 no more than a word long. */
1036 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1037 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1038 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1039 GET_MODE_BITSIZE (from_mode)))
1041 if (!((GET_CODE (from) == MEM
1042 && ! MEM_VOLATILE_P (from)
1043 && direct_load[(int) to_mode]
1044 && ! mode_dependent_address_p (XEXP (from, 0)))
1045 || GET_CODE (from) == REG
1046 || GET_CODE (from) == SUBREG))
1047 from = force_reg (from_mode, from);
1048 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1049 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1050 from = copy_to_reg (from);
1051 emit_move_insn (to, gen_lowpart (to_mode, from));
1052 return;
1055 /* Handle extension. */
1056 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1058 /* Convert directly if that works. */
1059 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1060 != CODE_FOR_nothing)
1062 if (flag_force_mem)
1063 from = force_not_mem (from);
1065 emit_unop_insn (code, to, from, equiv_code);
1066 return;
1068 else
1070 enum machine_mode intermediate;
1071 rtx tmp;
1072 tree shift_amount;
1074 /* Search for a mode to convert via. */
1075 for (intermediate = from_mode; intermediate != VOIDmode;
1076 intermediate = GET_MODE_WIDER_MODE (intermediate))
1077 if (((can_extend_p (to_mode, intermediate, unsignedp)
1078 != CODE_FOR_nothing)
1079 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1080 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1081 GET_MODE_BITSIZE (intermediate))))
1082 && (can_extend_p (intermediate, from_mode, unsignedp)
1083 != CODE_FOR_nothing))
1085 convert_move (to, convert_to_mode (intermediate, from,
1086 unsignedp), unsignedp);
1087 return;
1090 /* No suitable intermediate mode.
1091 Generate what we need with shifts. */
1092 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1093 - GET_MODE_BITSIZE (from_mode), 0);
1094 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1095 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1096 to, unsignedp);
1097 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1098 to, unsignedp);
1099 if (tmp != to)
1100 emit_move_insn (to, tmp);
1101 return;
1105 /* Support special truncate insns for certain modes. */
1107 if (from_mode == DImode && to_mode == SImode)
1109 #ifdef HAVE_truncdisi2
1110 if (HAVE_truncdisi2)
1112 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1113 return;
1115 #endif
1116 convert_move (to, force_reg (from_mode, from), unsignedp);
1117 return;
1120 if (from_mode == DImode && to_mode == HImode)
1122 #ifdef HAVE_truncdihi2
1123 if (HAVE_truncdihi2)
1125 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1126 return;
1128 #endif
1129 convert_move (to, force_reg (from_mode, from), unsignedp);
1130 return;
1133 if (from_mode == DImode && to_mode == QImode)
1135 #ifdef HAVE_truncdiqi2
1136 if (HAVE_truncdiqi2)
1138 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1139 return;
1141 #endif
1142 convert_move (to, force_reg (from_mode, from), unsignedp);
1143 return;
1146 if (from_mode == SImode && to_mode == HImode)
1148 #ifdef HAVE_truncsihi2
1149 if (HAVE_truncsihi2)
1151 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1152 return;
1154 #endif
1155 convert_move (to, force_reg (from_mode, from), unsignedp);
1156 return;
1159 if (from_mode == SImode && to_mode == QImode)
1161 #ifdef HAVE_truncsiqi2
1162 if (HAVE_truncsiqi2)
1164 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1165 return;
1167 #endif
1168 convert_move (to, force_reg (from_mode, from), unsignedp);
1169 return;
1172 if (from_mode == HImode && to_mode == QImode)
1174 #ifdef HAVE_trunchiqi2
1175 if (HAVE_trunchiqi2)
1177 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1178 return;
1180 #endif
1181 convert_move (to, force_reg (from_mode, from), unsignedp);
1182 return;
1185 if (from_mode == TImode && to_mode == DImode)
1187 #ifdef HAVE_trunctidi2
1188 if (HAVE_trunctidi2)
1190 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1191 return;
1193 #endif
1194 convert_move (to, force_reg (from_mode, from), unsignedp);
1195 return;
1198 if (from_mode == TImode && to_mode == SImode)
1200 #ifdef HAVE_trunctisi2
1201 if (HAVE_trunctisi2)
1203 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1204 return;
1206 #endif
1207 convert_move (to, force_reg (from_mode, from), unsignedp);
1208 return;
1211 if (from_mode == TImode && to_mode == HImode)
1213 #ifdef HAVE_trunctihi2
1214 if (HAVE_trunctihi2)
1216 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1217 return;
1219 #endif
1220 convert_move (to, force_reg (from_mode, from), unsignedp);
1221 return;
1224 if (from_mode == TImode && to_mode == QImode)
1226 #ifdef HAVE_trunctiqi2
1227 if (HAVE_trunctiqi2)
1229 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1230 return;
1232 #endif
1233 convert_move (to, force_reg (from_mode, from), unsignedp);
1234 return;
1237 /* Handle truncation of volatile memrefs, and so on;
1238 the things that couldn't be truncated directly,
1239 and for which there was no special instruction. */
1240 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1242 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1243 emit_move_insn (to, temp);
1244 return;
1247 /* Mode combination is not recognized. */
1248 abort ();
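/* Illustrative call: convert_move (si_reg, qi_reg, 1) zero-extends a
   QImode value into an SImode register, using a zero_extendqisi2
   pattern when the target has one and otherwise falling back to the
   intermediate-mode or shift-based paths above.  (The register names
   here are hypothetical.)  */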
1251 /* Return an rtx for a value that would result
1252 from converting X to mode MODE.
1253 Both X and MODE may be floating, or both integer.
1254 UNSIGNEDP is nonzero if X is an unsigned value.
1255 This can be done by referring to a part of X in place
1256 or by copying to a new temporary with conversion.
1258 This function *must not* call protect_from_queue
1259 except when putting X into an insn (in which case convert_move does it). */
1261 rtx
1262 convert_to_mode (mode, x, unsignedp)
1263 enum machine_mode mode;
1264 rtx x;
1265 int unsignedp;
1267 return convert_modes (mode, VOIDmode, x, unsignedp);
1270 /* Return an rtx for a value that would result
1271 from converting X from mode OLDMODE to mode MODE.
1272 Both modes may be floating, or both integer.
1273 UNSIGNEDP is nonzero if X is an unsigned value.
1275 This can be done by referring to a part of X in place
1276 or by copying to a new temporary with conversion.
1278 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1280 This function *must not* call protect_from_queue
1281 except when putting X into an insn (in which case convert_move does it). */
1283 rtx
1284 convert_modes (mode, oldmode, x, unsignedp)
1285 enum machine_mode mode, oldmode;
1286 rtx x;
1287 int unsignedp;
1289 rtx temp;
1291 /* If FROM is a SUBREG that indicates that we have already done at least
1292 the required extension, strip it. */
1294 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1295 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1296 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1297 x = gen_lowpart (mode, x);
1299 if (GET_MODE (x) != VOIDmode)
1300 oldmode = GET_MODE (x);
1302 if (mode == oldmode)
1303 return x;
1305 /* There is one case that we must handle specially: If we are converting
1306 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1307 we are to interpret the constant as unsigned, gen_lowpart will do
1308 the wrong thing if the constant appears negative. What we want to do is
1309 make the high-order word of the constant zero, not all ones. */
1311 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1312 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1313 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1315 HOST_WIDE_INT val = INTVAL (x);
1317 if (oldmode != VOIDmode
1318 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1320 int width = GET_MODE_BITSIZE (oldmode);
1322 /* We need to zero extend VAL. */
1323 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1326 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1329 /* We can do this with a gen_lowpart if both desired and current modes
1330 are integer, and this is either a constant integer, a register, or a
1331 non-volatile MEM. Except for the constant case where MODE is no
1332 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1334 if ((GET_CODE (x) == CONST_INT
1335 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1336 || (GET_MODE_CLASS (mode) == MODE_INT
1337 && GET_MODE_CLASS (oldmode) == MODE_INT
1338 && (GET_CODE (x) == CONST_DOUBLE
1339 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1340 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1341 && direct_load[(int) mode])
1342 || (GET_CODE (x) == REG
1343 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1344 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1346 /* ??? If we don't know OLDMODE, we have to assume here that
1347 X does not need sign- or zero-extension. This may not be
1348 the case, but it's the best we can do. */
1349 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1350 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1352 HOST_WIDE_INT val = INTVAL (x);
1353 int width = GET_MODE_BITSIZE (oldmode);
1355 /* We must sign or zero-extend in this case. Start by
1356 zero-extending, then sign extend if we need to. */
1357 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1358 if (! unsignedp
1359 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1360 val |= (HOST_WIDE_INT) (-1) << width;
1362 return gen_int_mode (val, mode);
1365 return gen_lowpart (mode, x);
1368 temp = gen_reg_rtx (mode);
1369 convert_move (temp, x, unsignedp);
1370 return temp;
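/* Illustrative call: convert_modes (SImode, QImode, x, 1) returns an
   SImode rtx holding the zero-extended value of X, either by
   reinterpreting X in place (e.g. a CONST_INT or a suitable REG) or by
   emitting a conversion into a fresh pseudo.  */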
1373 /* This macro is used to determine what the largest unit size that
1374 move_by_pieces can use is. */
1376 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1377 move efficiently, as opposed to MOVE_MAX which is the maximum
1378 number of bytes we can move with a single instruction. */
1380 #ifndef MOVE_MAX_PIECES
1381 #define MOVE_MAX_PIECES MOVE_MAX
1382 #endif
1384 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1385 store efficiently. Due to internal GCC limitations, this is
1386 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1387 for an immediate constant. */
1389 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1391 /* Generate several move instructions to copy LEN bytes from block FROM to
1392 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1393 and TO through protect_from_queue before calling.
1395 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1396 used to push FROM to the stack.
1398 ALIGN is maximum alignment we can assume. */
1400 void
1401 move_by_pieces (to, from, len, align)
1402 rtx to, from;
1403 unsigned HOST_WIDE_INT len;
1404 unsigned int align;
1406 struct move_by_pieces data;
1407 rtx to_addr, from_addr = XEXP (from, 0);
1408 unsigned int max_size = MOVE_MAX_PIECES + 1;
1409 enum machine_mode mode = VOIDmode, tmode;
1410 enum insn_code icode;
1412 data.offset = 0;
1413 data.from_addr = from_addr;
1414 if (to)
1416 to_addr = XEXP (to, 0);
1417 data.to = to;
1418 data.autinc_to
1419 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1420 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1421 data.reverse
1422 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1424 else
1426 to_addr = NULL_RTX;
1427 data.to = NULL_RTX;
1428 data.autinc_to = 1;
1429 #ifdef STACK_GROWS_DOWNWARD
1430 data.reverse = 1;
1431 #else
1432 data.reverse = 0;
1433 #endif
1435 data.to_addr = to_addr;
1436 data.from = from;
1437 data.autinc_from
1438 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1439 || GET_CODE (from_addr) == POST_INC
1440 || GET_CODE (from_addr) == POST_DEC);
1442 data.explicit_inc_from = 0;
1443 data.explicit_inc_to = 0;
1444 if (data.reverse) data.offset = len;
1445 data.len = len;
1447 /* If copying requires more than two move insns,
1448 copy addresses to registers (to make displacements shorter)
1449 and use post-increment if available. */
1450 if (!(data.autinc_from && data.autinc_to)
1451 && move_by_pieces_ninsns (len, align) > 2)
1453 /* Find the mode of the largest move... */
1454 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1455 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1456 if (GET_MODE_SIZE (tmode) < max_size)
1457 mode = tmode;
1459 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1461 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1462 data.autinc_from = 1;
1463 data.explicit_inc_from = -1;
1465 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1467 data.from_addr = copy_addr_to_reg (from_addr);
1468 data.autinc_from = 1;
1469 data.explicit_inc_from = 1;
1471 if (!data.autinc_from && CONSTANT_P (from_addr))
1472 data.from_addr = copy_addr_to_reg (from_addr);
1473 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1475 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1476 data.autinc_to = 1;
1477 data.explicit_inc_to = -1;
1479 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1481 data.to_addr = copy_addr_to_reg (to_addr);
1482 data.autinc_to = 1;
1483 data.explicit_inc_to = 1;
1485 if (!data.autinc_to && CONSTANT_P (to_addr))
1486 data.to_addr = copy_addr_to_reg (to_addr);
1489 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1490 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1491 align = MOVE_MAX * BITS_PER_UNIT;
1493 /* First move what we can in the largest integer mode, then go to
1494 successively smaller modes. */
1496 while (max_size > 1)
1498 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1499 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1500 if (GET_MODE_SIZE (tmode) < max_size)
1501 mode = tmode;
1503 if (mode == VOIDmode)
1504 break;
1506 icode = mov_optab->handlers[(int) mode].insn_code;
1507 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1508 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1510 max_size = GET_MODE_SIZE (mode);
1513 /* The code above should have handled everything. */
1514 if (data.len > 0)
1515 abort ();
1518 /* Return number of insns required to move L bytes by pieces.
1519 ALIGN (in bits) is maximum alignment we can assume. */
1521 static unsigned HOST_WIDE_INT
1522 move_by_pieces_ninsns (l, align)
1523 unsigned HOST_WIDE_INT l;
1524 unsigned int align;
1526 unsigned HOST_WIDE_INT n_insns = 0;
1527 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1529 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1530 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1531 align = MOVE_MAX * BITS_PER_UNIT;
1533 while (max_size > 1)
1535 enum machine_mode mode = VOIDmode, tmode;
1536 enum insn_code icode;
1538 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1539 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1540 if (GET_MODE_SIZE (tmode) < max_size)
1541 mode = tmode;
1543 if (mode == VOIDmode)
1544 break;
1546 icode = mov_optab->handlers[(int) mode].insn_code;
1547 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1548 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1550 max_size = GET_MODE_SIZE (mode);
1553 if (l)
1554 abort ();
1555 return n_insns;
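/* Worked example (assuming MOVE_MAX is 4 and ALIGN is 32 bits): for
   L = 10 the loop above counts two SImode moves and one HImode move,
   so the function returns 3.  */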
1558 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1559 with move instructions for mode MODE. GENFUN is the gen_... function
1560 to make a move insn for that mode. DATA has all the other info. */
1562 static void
1563 move_by_pieces_1 (genfun, mode, data)
1564 rtx (*genfun) PARAMS ((rtx, ...));
1565 enum machine_mode mode;
1566 struct move_by_pieces *data;
1568 unsigned int size = GET_MODE_SIZE (mode);
1569 rtx to1 = NULL_RTX, from1;
1571 while (data->len >= size)
1573 if (data->reverse)
1574 data->offset -= size;
1576 if (data->to)
1578 if (data->autinc_to)
1579 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1580 data->offset);
1581 else
1582 to1 = adjust_address (data->to, mode, data->offset);
1585 if (data->autinc_from)
1586 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1587 data->offset);
1588 else
1589 from1 = adjust_address (data->from, mode, data->offset);
1591 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1592 emit_insn (gen_add2_insn (data->to_addr,
1593 GEN_INT (-(HOST_WIDE_INT)size)));
1594 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1595 emit_insn (gen_add2_insn (data->from_addr,
1596 GEN_INT (-(HOST_WIDE_INT)size)));
1598 if (data->to)
1599 emit_insn ((*genfun) (to1, from1));
1600 else
1602 #ifdef PUSH_ROUNDING
1603 emit_single_push_insn (mode, from1, NULL);
1604 #else
1605 abort ();
1606 #endif
1609 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1610 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1611 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1612 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1614 if (! data->reverse)
1615 data->offset += size;
1617 data->len -= size;
1621 /* Emit code to move a block Y to a block X.
1622 This may be done with string-move instructions,
1623 with multiple scalar move instructions, or with a library call.
1625 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1626 with mode BLKmode.
1627 SIZE is an rtx that says how long they are.
1628 ALIGN is the maximum alignment we can assume they have.
1630 Return the address of the new block, if memcpy is called and returns it,
1631 0 otherwise. */
1633 static GTY(()) tree block_move_fn;
1634 rtx
1635 emit_block_move (x, y, size)
1636 rtx x, y;
1637 rtx size;
1639 rtx retval = 0;
1640 #ifdef TARGET_MEM_FUNCTIONS
1641 tree call_expr, arg_list;
1642 #endif
1643 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1645 if (GET_MODE (x) != BLKmode)
1646 abort ();
1648 if (GET_MODE (y) != BLKmode)
1649 abort ();
1651 x = protect_from_queue (x, 1);
1652 y = protect_from_queue (y, 0);
1653 size = protect_from_queue (size, 0);
1655 if (GET_CODE (x) != MEM)
1656 abort ();
1657 if (GET_CODE (y) != MEM)
1658 abort ();
1659 if (size == 0)
1660 abort ();
1662 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1663 move_by_pieces (x, y, INTVAL (size), align);
1664 else
1666 /* Try the most limited insn first, because there's no point
1667 including more than one in the machine description unless
1668 the more limited one has some advantage. */
1670 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1671 enum machine_mode mode;
1673 /* Since this is a move insn, we don't care about volatility. */
1674 volatile_ok = 1;
1676 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1677 mode = GET_MODE_WIDER_MODE (mode))
1679 enum insn_code code = movstr_optab[(int) mode];
1680 insn_operand_predicate_fn pred;
1682 if (code != CODE_FOR_nothing
1683 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1684 here because if SIZE is less than the mode mask, as it is
1685 returned by the macro, it will definitely be less than the
1686 actual mode mask. */
1687 && ((GET_CODE (size) == CONST_INT
1688 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1689 <= (GET_MODE_MASK (mode) >> 1)))
1690 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1691 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1692 || (*pred) (x, BLKmode))
1693 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1694 || (*pred) (y, BLKmode))
1695 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1696 || (*pred) (opalign, VOIDmode)))
1698 rtx op2;
1699 rtx last = get_last_insn ();
1700 rtx pat;
1702 op2 = convert_to_mode (mode, size, 1);
1703 pred = insn_data[(int) code].operand[2].predicate;
1704 if (pred != 0 && ! (*pred) (op2, mode))
1705 op2 = copy_to_mode_reg (mode, op2);
1707 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1708 if (pat)
1710 emit_insn (pat);
1711 volatile_ok = 0;
1712 return 0;
1714 else
1715 delete_insns_since (last);
1719 volatile_ok = 0;
1721 /* X, Y, or SIZE may have been passed through protect_from_queue.
1723 It is unsafe to save the value generated by protect_from_queue
1724 and reuse it later. Consider what happens if emit_queue is
1725 called before the return value from protect_from_queue is used.
1727 Expansion of the CALL_EXPR below will call emit_queue before
1728 we are finished emitting RTL for argument setup. So if we are
1729 not careful we could get the wrong value for an argument.
1731 To avoid this problem we go ahead and emit code to copy X, Y &
1732 SIZE into new pseudos. We can then place those new pseudos
1733 into an RTL_EXPR and use them later, even after a call to
1734 emit_queue.
1736 Note this is not strictly needed for library calls since they
1737 do not call emit_queue before loading their arguments. However,
1738 we may need to have library calls call emit_queue in the future
1739 since failing to do so could cause problems for targets which
1740 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1741 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1742 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1744 #ifdef TARGET_MEM_FUNCTIONS
1745 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1746 #else
1747 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1748 TREE_UNSIGNED (integer_type_node));
1749 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1750 #endif
1752 #ifdef TARGET_MEM_FUNCTIONS
1753 /* It is incorrect to use the libcall calling conventions to call
1754 memcpy in this context.
1756 This could be a user call to memcpy and the user may wish to
1757 examine the return value from memcpy.
1759 For targets where libcalls and normal calls have different conventions
1760 for returning pointers, we could end up generating incorrect code.
1762 So instead of using a libcall sequence we build up a suitable
1763 CALL_EXPR and expand the call in the normal fashion. */
1764 if (block_move_fn == NULL_TREE)
1766 tree fntype;
1768 /* This was copied from except.c, I don't know if all this is
1769 necessary in this context or not. */
1770 block_move_fn = get_identifier ("memcpy");
1771 fntype = build_pointer_type (void_type_node);
1772 fntype = build_function_type (fntype, NULL_TREE);
1773 block_move_fn = build_decl (FUNCTION_DECL, block_move_fn, fntype);
1774 DECL_EXTERNAL (block_move_fn) = 1;
1775 TREE_PUBLIC (block_move_fn) = 1;
1776 DECL_ARTIFICIAL (block_move_fn) = 1;
1777 TREE_NOTHROW (block_move_fn) = 1;
1778 make_decl_rtl (block_move_fn, NULL);
1779 assemble_external (block_move_fn);
1782 /* We need to make an argument list for the function call.
1784 memcpy has three arguments, the first two are void * addresses and
1785 the last is a size_t byte count for the copy. */
1786 arg_list
1787 = build_tree_list (NULL_TREE,
1788 make_tree (build_pointer_type (void_type_node), x));
1789 TREE_CHAIN (arg_list)
1790 = build_tree_list (NULL_TREE,
1791 make_tree (build_pointer_type (void_type_node), y));
1792 TREE_CHAIN (TREE_CHAIN (arg_list))
1793 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1794 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1796 /* Now we have to build up the CALL_EXPR itself. */
1797 call_expr = build1 (ADDR_EXPR,
1798 build_pointer_type (TREE_TYPE (block_move_fn)),
1799 block_move_fn);
1800 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_move_fn)),
1801 call_expr, arg_list, NULL_TREE);
1802 TREE_SIDE_EFFECTS (call_expr) = 1;
1804 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1805 #else
1806 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1807 VOIDmode, 3, y, Pmode, x, Pmode,
1808 convert_to_mode (TYPE_MODE (integer_type_node), size,
1809 TREE_UNSIGNED (integer_type_node)),
1810 TYPE_MODE (integer_type_node));
1811 #endif
1813 /* If we are initializing a readonly value, show the above call
1814 clobbered it. Otherwise, a load from it may erroneously be hoisted
1815 from a loop. */
1816 if (RTX_UNCHANGING_P (x))
1817 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1820 return retval;
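/* Illustrative call: emit_block_move (dst_mem, src_mem, GEN_INT (n))
   copies N bytes between two BLKmode MEMs, choosing between inline
   move_by_pieces, a movstr pattern and a memcpy/bcopy call as described
   above.  (The operand names are hypothetical.)  */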
1823 /* Copy all or part of a value X into registers starting at REGNO.
1824 The number of registers to be filled is NREGS. */
1826 void
1827 move_block_to_reg (regno, x, nregs, mode)
1828 int regno;
1829 rtx x;
1830 int nregs;
1831 enum machine_mode mode;
1833 int i;
1834 #ifdef HAVE_load_multiple
1835 rtx pat;
1836 rtx last;
1837 #endif
1839 if (nregs == 0)
1840 return;
1842 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1843 x = validize_mem (force_const_mem (mode, x));
1845 /* See if the machine can do this with a load multiple insn. */
1846 #ifdef HAVE_load_multiple
1847 if (HAVE_load_multiple)
1849 last = get_last_insn ();
1850 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1851 GEN_INT (nregs));
1852 if (pat)
1854 emit_insn (pat);
1855 return;
1857 else
1858 delete_insns_since (last);
1860 #endif
1862 for (i = 0; i < nregs; i++)
1863 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1864 operand_subword_force (x, i, mode));
1867 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1868 The number of registers to be filled is NREGS. SIZE indicates the number
1869 of bytes in the object X. */
1871 void
1872 move_block_from_reg (regno, x, nregs, size)
1873 int regno;
1874 rtx x;
1875 int nregs;
1876 int size;
1878 int i;
1879 #ifdef HAVE_store_multiple
1880 rtx pat;
1881 rtx last;
1882 #endif
1883 enum machine_mode mode;
1885 if (nregs == 0)
1886 return;
1888 /* If SIZE is that of a mode no bigger than a word, just use that
1889 mode's store operation. */
1890 if (size <= UNITS_PER_WORD
1891 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1892 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1894 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1895 return;
1898 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1899 to the left before storing to memory. Note that the previous test
1900 doesn't handle all cases (e.g. SIZE == 3). */
1901 if (size < UNITS_PER_WORD
1902 && BYTES_BIG_ENDIAN
1903 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1905 rtx tem = operand_subword (x, 0, 1, BLKmode);
1906 rtx shift;
1908 if (tem == 0)
1909 abort ();
1911 shift = expand_shift (LSHIFT_EXPR, word_mode,
1912 gen_rtx_REG (word_mode, regno),
1913 build_int_2 ((UNITS_PER_WORD - size)
1914 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1915 emit_move_insn (tem, shift);
1916 return;
1919 /* See if the machine can do this with a store multiple insn. */
1920 #ifdef HAVE_store_multiple
1921 if (HAVE_store_multiple)
1923 last = get_last_insn ();
1924 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1925 GEN_INT (nregs));
1926 if (pat)
1928 emit_insn (pat);
1929 return;
1931 else
1932 delete_insns_since (last);
1934 #endif
1936 for (i = 0; i < nregs; i++)
1938 rtx tem = operand_subword (x, i, 1, BLKmode);
1940 if (tem == 0)
1941 abort ();
1943 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1947 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1948 registers represented by a PARALLEL. SSIZE represents the total size of
1949 block SRC in bytes, or -1 if not known. */
1950 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1951 the balance will be in what would be the low-order memory addresses, i.e.
1952 left justified for big endian, right justified for little endian. This
1953 happens to be true for the targets currently using this support. If this
1954 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1955 would be needed. */
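/* Illustrative DST shape: (parallel [(expr_list (reg:DI 3) (const_int 0))
   (expr_list (reg:DI 4) (const_int 8))]) describes a 16-byte value whose
   first eight bytes are loaded into r3 and the next eight into r4; the
   register numbers are made up.  */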
1957 void
1958 emit_group_load (dst, orig_src, ssize)
1959 rtx dst, orig_src;
1960 int ssize;
1962 rtx *tmps, src;
1963 int start, i;
1965 if (GET_CODE (dst) != PARALLEL)
1966 abort ();
1968 /* Check for a NULL entry, used to indicate that the parameter goes
1969 both on the stack and in registers. */
1970 if (XEXP (XVECEXP (dst, 0, 0), 0))
1971 start = 0;
1972 else
1973 start = 1;
1975 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1977 /* Process the pieces. */
1978 for (i = start; i < XVECLEN (dst, 0); i++)
1980 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1981 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1982 unsigned int bytelen = GET_MODE_SIZE (mode);
1983 int shift = 0;
1985 /* Handle trailing fragments that run over the size of the struct. */
1986 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1988 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1989 bytelen = ssize - bytepos;
1990 if (bytelen <= 0)
1991 abort ();
1994 /* If we won't be loading directly from memory, protect the real source
1995 from strange tricks we might play; but make sure that the source can
1996 be loaded directly into the destination. */
1997 src = orig_src;
1998 if (GET_CODE (orig_src) != MEM
1999 && (!CONSTANT_P (orig_src)
2000 || (GET_MODE (orig_src) != mode
2001 && GET_MODE (orig_src) != VOIDmode)))
2003 if (GET_MODE (orig_src) == VOIDmode)
2004 src = gen_reg_rtx (mode);
2005 else
2006 src = gen_reg_rtx (GET_MODE (orig_src));
2008 emit_move_insn (src, orig_src);
2011 /* Optimize the access just a bit. */
2012 if (GET_CODE (src) == MEM
2013 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2014 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2015 && bytelen == GET_MODE_SIZE (mode))
2017 tmps[i] = gen_reg_rtx (mode);
2018 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2020 else if (GET_CODE (src) == CONCAT)
2022 if ((bytepos == 0
2023 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2024 || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2025 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
2027 tmps[i] = XEXP (src, bytepos != 0);
2028 if (! CONSTANT_P (tmps[i])
2029 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2030 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2031 0, 1, NULL_RTX, mode, mode, ssize);
2033 else if (bytepos == 0)
2035 rtx mem = assign_stack_temp (GET_MODE (src),
2036 GET_MODE_SIZE (GET_MODE (src)), 0);
2037 emit_move_insn (mem, src);
2038 tmps[i] = adjust_address (mem, mode, 0);
2040 else
2041 abort ();
2043 else if (CONSTANT_P (src)
2044 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2045 tmps[i] = src;
2046 else
2047 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2048 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2049 mode, mode, ssize);
2051 if (BYTES_BIG_ENDIAN && shift)
2052 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2053 tmps[i], 0, OPTAB_WIDEN);
2056 emit_queue ();
2058 /* Copy the extracted pieces into the proper (probable) hard regs. */
2059 for (i = start; i < XVECLEN (dst, 0); i++)
2060 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2063 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2064 registers represented by a PARALLEL. SSIZE represents the total size of
2065 block DST, or -1 if not known. */
2067 void
2068 emit_group_store (orig_dst, src, ssize)
2069 rtx orig_dst, src;
2070 int ssize;
2072 rtx *tmps, dst;
2073 int start, i;
2075 if (GET_CODE (src) != PARALLEL)
2076 abort ();
2078 /* Check for a NULL entry, used to indicate that the parameter goes
2079 both on the stack and in registers. */
2080 if (XEXP (XVECEXP (src, 0, 0), 0))
2081 start = 0;
2082 else
2083 start = 1;
2085 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2087 /* Copy the (probable) hard regs into pseudos. */
2088 for (i = start; i < XVECLEN (src, 0); i++)
2090 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2091 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2092 emit_move_insn (tmps[i], reg);
2094 emit_queue ();
2096 /* If we won't be storing directly into memory, protect the real destination
2097 from strange tricks we might play. */
2098 dst = orig_dst;
2099 if (GET_CODE (dst) == PARALLEL)
2101 rtx temp;
2103 /* We can get a PARALLEL dst if there is a conditional expression in
2104 a return statement. In that case, the dst and src are the same,
2105 so no action is necessary. */
2106 if (rtx_equal_p (dst, src))
2107 return;
2109 /* It is unclear if we can ever reach here, but we may as well handle
2110 it. Allocate a temporary, and split this into a store/load to/from
2111 the temporary. */
2113 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2114 emit_group_store (temp, src, ssize);
2115 emit_group_load (dst, temp, ssize);
2116 return;
2118 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2120 dst = gen_reg_rtx (GET_MODE (orig_dst));
2121 /* Make life a bit easier for combine. */
2122 emit_move_insn (dst, const0_rtx);
2125 /* Process the pieces. */
2126 for (i = start; i < XVECLEN (src, 0); i++)
2128 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2129 enum machine_mode mode = GET_MODE (tmps[i]);
2130 unsigned int bytelen = GET_MODE_SIZE (mode);
2131 rtx dest = dst;
2133 /* Handle trailing fragments that run over the size of the struct. */
2134 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2136 if (BYTES_BIG_ENDIAN)
2138 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2139 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2140 tmps[i], 0, OPTAB_WIDEN);
2142 bytelen = ssize - bytepos;
2145 if (GET_CODE (dst) == CONCAT)
2147 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2148 dest = XEXP (dst, 0);
2149 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2151 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2152 dest = XEXP (dst, 1);
2154 else
2155 abort ();
2158 /* Optimize the access just a bit. */
2159 if (GET_CODE (dest) == MEM
2160 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2161 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2162 && bytelen == GET_MODE_SIZE (mode))
2163 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2164 else
2165 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2166 mode, tmps[i], ssize);
2169 emit_queue ();
2171 /* Copy from the pseudo into the (probable) hard reg. */
2172 if (GET_CODE (dst) == REG)
2173 emit_move_insn (orig_dst, dst);
2176 /* Generate code to copy a BLKmode object of TYPE out of a
2177 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2178 is null, a stack temporary is created. TGTBLK is returned.
2180 The primary purpose of this routine is to handle functions
2181 that return BLKmode structures in registers. Some machines
2182 (the PA for example) want to return all small structures
2183 in registers regardless of the structure's alignment. */
2186 copy_blkmode_from_reg (tgtblk, srcreg, type)
2187 rtx tgtblk;
2188 rtx srcreg;
2189 tree type;
2191 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2192 rtx src = NULL, dst = NULL;
2193 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2194 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2196 if (tgtblk == 0)
2198 tgtblk = assign_temp (build_qualified_type (type,
2199 (TYPE_QUALS (type)
2200 | TYPE_QUAL_CONST)),
2201 0, 1, 1);
2202 preserve_temp_slots (tgtblk);
2205 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2206 into a new pseudo which is a full word.
2208 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2209 the wrong part of the register gets copied so we fake a type conversion
2210 in place. */
2211 if (GET_MODE (srcreg) != BLKmode
2212 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2214 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2215 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2216 else
2217 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2220 /* Structures whose size is not a multiple of a word are aligned
2221 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2222 machine, this means we must skip the empty high order bytes when
2223 calculating the bit offset. */
2224 if (BYTES_BIG_ENDIAN
2225 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2226 && bytes % UNITS_PER_WORD)
2227 big_endian_correction
2228 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
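  /* For example (assuming a big-endian target with 32-bit words), a 6-byte
     structure gives bytes % UNITS_PER_WORD == 2, so big_endian_correction
     == 32 - 2 * 8 == 16, and the source bit offsets used below are shifted
     past those 16 bits of padding.  */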
2230 /* Copy the structure BITSIZE bits at a time.
2232 We could probably emit more efficient code for machines which do not use
2233 strict alignment, but it doesn't seem worth the effort at the current
2234 time. */
2235 for (bitpos = 0, xbitpos = big_endian_correction;
2236 bitpos < bytes * BITS_PER_UNIT;
2237 bitpos += bitsize, xbitpos += bitsize)
2239 /* We need a new source operand each time xbitpos is on a
2240 word boundary or when xbitpos == big_endian_correction
2241 (the first time through). */
2242 if (xbitpos % BITS_PER_WORD == 0
2243 || xbitpos == big_endian_correction)
2244 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2245 GET_MODE (srcreg));
2247 /* We need a new destination operand each time bitpos is on
2248 a word boundary. */
2249 if (bitpos % BITS_PER_WORD == 0)
2250 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2252 /* Use xbitpos for the source extraction (right justified) and
2253 bitpos for the destination store (left justified). */
2254 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2255 extract_bit_field (src, bitsize,
2256 xbitpos % BITS_PER_WORD, 1,
2257 NULL_RTX, word_mode, word_mode,
2258 BITS_PER_WORD),
2259 BITS_PER_WORD);
2262 return tgtblk;
2265 /* Add a USE expression for REG to the (possibly empty) list pointed
2266 to by CALL_FUSAGE. REG must denote a hard register. */
2268 void
2269 use_reg (call_fusage, reg)
2270 rtx *call_fusage, reg;
2272 if (GET_CODE (reg) != REG
2273 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2274 abort ();
2276 *call_fusage
2277 = gen_rtx_EXPR_LIST (VOIDmode,
2278 gen_rtx_USE (VOIDmode, reg), *call_fusage);
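/* As an illustration (register numbers hypothetical): calling this first
   for (reg:SI 4) and then for (reg:SI 5) leaves *CALL_FUSAGE as

	(expr_list (use (reg:SI 5))
		   (expr_list (use (reg:SI 4))
			      (nil)))

   i.e. the most recently added USE sits at the head of the list.  */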
2281 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2282 starting at REGNO. All of these registers must be hard registers. */
2284 void
2285 use_regs (call_fusage, regno, nregs)
2286 rtx *call_fusage;
2287 int regno;
2288 int nregs;
2290 int i;
2292 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2293 abort ();
2295 for (i = 0; i < nregs; i++)
2296 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2299 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2300 PARALLEL REGS. This is for calls that pass values in multiple
2301 non-contiguous locations. The Irix 6 ABI has examples of this. */
2303 void
2304 use_group_regs (call_fusage, regs)
2305 rtx *call_fusage;
2306 rtx regs;
2308 int i;
2310 for (i = 0; i < XVECLEN (regs, 0); i++)
2312 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2314 /* A NULL entry means the parameter goes both on the stack and in
2315 registers. This can also be a MEM for targets that pass values
2316 partially on the stack and partially in registers. */
2317 if (reg != 0 && GET_CODE (reg) == REG)
2318 use_reg (call_fusage, reg);
2323 /* Determine whether the LEN bytes generated by CONSTFUN can be
2324 stored to memory using several move instructions. CONSTFUNDATA is
2325 a pointer which will be passed as argument in every CONSTFUN call.
2326 ALIGN is maximum alignment we can assume. Return nonzero if a
2327 call to store_by_pieces should succeed. */
2330 can_store_by_pieces (len, constfun, constfundata, align)
2331 unsigned HOST_WIDE_INT len;
2332 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2333 PTR constfundata;
2334 unsigned int align;
2336 unsigned HOST_WIDE_INT max_size, l;
2337 HOST_WIDE_INT offset = 0;
2338 enum machine_mode mode, tmode;
2339 enum insn_code icode;
2340 int reverse;
2341 rtx cst;
2343 if (! MOVE_BY_PIECES_P (len, align))
2344 return 0;
2346 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2347 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2348 align = MOVE_MAX * BITS_PER_UNIT;
2350 /* We would first store what we can in the largest integer mode, then go to
2351 successively smaller modes. */
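  /* A minimal sketch of that walk (assuming QImode/HImode/SImode of 1, 2
     and 4 bytes, all with usable move patterns and adequate alignment, and
     STORE_MAX_PIECES == 4): for LEN == 11 the loop below accounts for two
     SImode pieces, then one HImode piece, then one QImode piece, leaving
     l == 0.  */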
2353 for (reverse = 0;
2354 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2355 reverse++)
2357 l = len;
2358 mode = VOIDmode;
2359 max_size = STORE_MAX_PIECES + 1;
2360 while (max_size > 1)
2362 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2363 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2364 if (GET_MODE_SIZE (tmode) < max_size)
2365 mode = tmode;
2367 if (mode == VOIDmode)
2368 break;
2370 icode = mov_optab->handlers[(int) mode].insn_code;
2371 if (icode != CODE_FOR_nothing
2372 && align >= GET_MODE_ALIGNMENT (mode))
2374 unsigned int size = GET_MODE_SIZE (mode);
2376 while (l >= size)
2378 if (reverse)
2379 offset -= size;
2381 cst = (*constfun) (constfundata, offset, mode);
2382 if (!LEGITIMATE_CONSTANT_P (cst))
2383 return 0;
2385 if (!reverse)
2386 offset += size;
2388 l -= size;
2392 max_size = GET_MODE_SIZE (mode);
2395 /* The code above should have handled everything. */
2396 if (l != 0)
2397 abort ();
2400 return 1;
2403 /* Generate several move instructions to store LEN bytes generated by
2404 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2405 pointer which will be passed as argument in every CONSTFUN call.
2406 ALIGN is maximum alignment we can assume. */
2408 void
2409 store_by_pieces (to, len, constfun, constfundata, align)
2410 rtx to;
2411 unsigned HOST_WIDE_INT len;
2412 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2413 PTR constfundata;
2414 unsigned int align;
2416 struct store_by_pieces data;
2418 if (! MOVE_BY_PIECES_P (len, align))
2419 abort ();
2420 to = protect_from_queue (to, 1);
2421 data.constfun = constfun;
2422 data.constfundata = constfundata;
2423 data.len = len;
2424 data.to = to;
2425 store_by_pieces_1 (&data, align);
2428 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2429 rtx with BLKmode). The caller must pass TO through protect_from_queue
2430 before calling. ALIGN is maximum alignment we can assume. */
2432 static void
2433 clear_by_pieces (to, len, align)
2434 rtx to;
2435 unsigned HOST_WIDE_INT len;
2436 unsigned int align;
2438 struct store_by_pieces data;
2440 data.constfun = clear_by_pieces_1;
2441 data.constfundata = NULL;
2442 data.len = len;
2443 data.to = to;
2444 store_by_pieces_1 (&data, align);
2447 /* Callback routine for clear_by_pieces.
2448 Return const0_rtx unconditionally. */
2450 static rtx
2451 clear_by_pieces_1 (data, offset, mode)
2452 PTR data ATTRIBUTE_UNUSED;
2453 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2454 enum machine_mode mode ATTRIBUTE_UNUSED;
2456 return const0_rtx;
2459 /* Subroutine of clear_by_pieces and store_by_pieces.
2460 Generate several move instructions to store LEN bytes of block TO. (A MEM
2461 rtx with BLKmode). The caller must pass TO through protect_from_queue
2462 before calling. ALIGN is maximum alignment we can assume. */
2464 static void
2465 store_by_pieces_1 (data, align)
2466 struct store_by_pieces *data;
2467 unsigned int align;
2469 rtx to_addr = XEXP (data->to, 0);
2470 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2471 enum machine_mode mode = VOIDmode, tmode;
2472 enum insn_code icode;
2474 data->offset = 0;
2475 data->to_addr = to_addr;
2476 data->autinc_to
2477 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2478 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2480 data->explicit_inc_to = 0;
2481 data->reverse
2482 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2483 if (data->reverse)
2484 data->offset = data->len;
2486 /* If storing requires more than two move insns,
2487 copy addresses to registers (to make displacements shorter)
2488 and use post-increment if available. */
2489 if (!data->autinc_to
2490 && move_by_pieces_ninsns (data->len, align) > 2)
2492 /* Determine the main mode we'll be using. */
2493 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2494 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2495 if (GET_MODE_SIZE (tmode) < max_size)
2496 mode = tmode;
2498 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2500 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2501 data->autinc_to = 1;
2502 data->explicit_inc_to = -1;
2505 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2506 && ! data->autinc_to)
2508 data->to_addr = copy_addr_to_reg (to_addr);
2509 data->autinc_to = 1;
2510 data->explicit_inc_to = 1;
2513 if ( !data->autinc_to && CONSTANT_P (to_addr))
2514 data->to_addr = copy_addr_to_reg (to_addr);
2517 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2518 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2519 align = MOVE_MAX * BITS_PER_UNIT;
2521 /* First store what we can in the largest integer mode, then go to
2522 successively smaller modes. */
2524 while (max_size > 1)
2526 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2527 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2528 if (GET_MODE_SIZE (tmode) < max_size)
2529 mode = tmode;
2531 if (mode == VOIDmode)
2532 break;
2534 icode = mov_optab->handlers[(int) mode].insn_code;
2535 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2536 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2538 max_size = GET_MODE_SIZE (mode);
2541 /* The code above should have handled everything. */
2542 if (data->len != 0)
2543 abort ();
2546 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2547 with move instructions for mode MODE. GENFUN is the gen_... function
2548 to make a move insn for that mode. DATA has all the other info. */
2550 static void
2551 store_by_pieces_2 (genfun, mode, data)
2552 rtx (*genfun) PARAMS ((rtx, ...));
2553 enum machine_mode mode;
2554 struct store_by_pieces *data;
2556 unsigned int size = GET_MODE_SIZE (mode);
2557 rtx to1, cst;
2559 while (data->len >= size)
2561 if (data->reverse)
2562 data->offset -= size;
2564 if (data->autinc_to)
2565 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2566 data->offset);
2567 else
2568 to1 = adjust_address (data->to, mode, data->offset);
2570 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2571 emit_insn (gen_add2_insn (data->to_addr,
2572 GEN_INT (-(HOST_WIDE_INT) size)));
2574 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2575 emit_insn ((*genfun) (to1, cst));
2577 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2578 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2580 if (! data->reverse)
2581 data->offset += size;
2583 data->len -= size;
2587 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2588 its length in bytes. */
2590 static GTY(()) tree block_clear_fn;
2592 clear_storage (object, size)
2593 rtx object;
2594 rtx size;
2596 #ifdef TARGET_MEM_FUNCTIONS
2597 tree call_expr, arg_list;
2598 #endif
2599 rtx retval = 0;
2600 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2601 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2603 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2604 just move a zero. Otherwise, do this a piece at a time. */
2605 if (GET_MODE (object) != BLKmode
2606 && GET_CODE (size) == CONST_INT
2607 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2608 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2609 else
2611 object = protect_from_queue (object, 1);
2612 size = protect_from_queue (size, 0);
2614 if (GET_CODE (size) == CONST_INT
2615 && MOVE_BY_PIECES_P (INTVAL (size), align))
2616 clear_by_pieces (object, INTVAL (size), align);
2617 else
2619 /* Try the most limited insn first, because there's no point
2620 including more than one in the machine description unless
2621 the more limited one has some advantage. */
2623 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2624 enum machine_mode mode;
2626 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2627 mode = GET_MODE_WIDER_MODE (mode))
2629 enum insn_code code = clrstr_optab[(int) mode];
2630 insn_operand_predicate_fn pred;
2632 if (code != CODE_FOR_nothing
2633 /* We don't need MODE to be narrower than
2634 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2635 the mode mask, as it is returned by the macro, it will
2636 definitely be less than the actual mode mask. */
2637 && ((GET_CODE (size) == CONST_INT
2638 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2639 <= (GET_MODE_MASK (mode) >> 1)))
2640 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2641 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2642 || (*pred) (object, BLKmode))
2643 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2644 || (*pred) (opalign, VOIDmode)))
2646 rtx op1;
2647 rtx last = get_last_insn ();
2648 rtx pat;
2650 op1 = convert_to_mode (mode, size, 1);
2651 pred = insn_data[(int) code].operand[1].predicate;
2652 if (pred != 0 && ! (*pred) (op1, mode))
2653 op1 = copy_to_mode_reg (mode, op1);
2655 pat = GEN_FCN ((int) code) (object, op1, opalign);
2656 if (pat)
2658 emit_insn (pat);
2659 return 0;
2661 else
2662 delete_insns_since (last);
2666 /* OBJECT or SIZE may have been passed through protect_from_queue.
2668 It is unsafe to save the value generated by protect_from_queue
2669 and reuse it later. Consider what happens if emit_queue is
2670 called before the return value from protect_from_queue is used.
2672 Expansion of the CALL_EXPR below will call emit_queue before
2673 we are finished emitting RTL for argument setup. So if we are
2674 not careful we could get the wrong value for an argument.
2676 To avoid this problem we go ahead and emit code to copy OBJECT
2677 and SIZE into new pseudos. We can then place those new pseudos
2678 into an RTL_EXPR and use them later, even after a call to
2679 emit_queue.
2681 Note this is not strictly needed for library calls since they
2682 do not call emit_queue before loading their arguments. However,
2683 we may need to have library calls call emit_queue in the future
2684 since failing to do so could cause problems for targets which
2685 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2686 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2688 #ifdef TARGET_MEM_FUNCTIONS
2689 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2690 #else
2691 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2692 TREE_UNSIGNED (integer_type_node));
2693 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2694 #endif
2696 #ifdef TARGET_MEM_FUNCTIONS
2697 /* It is incorrect to use the libcall calling conventions to call
2698 memset in this context.
2700 This could be a user call to memset and the user may wish to
2701 examine the return value from memset.
2703 For targets where libcalls and normal calls have different
2704 conventions for returning pointers, we could end up generating
2705 incorrect code.
2707 So instead of using a libcall sequence we build up a suitable
2708 CALL_EXPR and expand the call in the normal fashion. */
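  /* In effect this expands the equivalent of the C call

	(void) memset (object_addr, 0, object_size);

     through the ordinary call machinery, where object_addr and object_size
     are simply names for the OBJECT address and SIZE pseudos computed
     above.  */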
2709 if (block_clear_fn == NULL_TREE)
2711 tree fntype;
2713 /* This was copied from except.c; I don't know whether all of it is
2714 necessary in this context. */
2715 block_clear_fn = get_identifier ("memset");
2716 fntype = build_pointer_type (void_type_node);
2717 fntype = build_function_type (fntype, NULL_TREE);
2718 block_clear_fn = build_decl (FUNCTION_DECL, block_clear_fn,
2719 fntype);
2720 DECL_EXTERNAL (block_clear_fn) = 1;
2721 TREE_PUBLIC (block_clear_fn) = 1;
2722 DECL_ARTIFICIAL (block_clear_fn) = 1;
2723 TREE_NOTHROW (block_clear_fn) = 1;
2724 make_decl_rtl (block_clear_fn, NULL);
2725 assemble_external (block_clear_fn);
2728 /* We need to make an argument list for the function call.
2730 memset has three arguments: the first is a void * address, the
2731 second an integer with the initialization value, and the last a
2732 size_t count of bytes to set. */
2733 arg_list
2734 = build_tree_list (NULL_TREE,
2735 make_tree (build_pointer_type (void_type_node),
2736 object));
2737 TREE_CHAIN (arg_list)
2738 = build_tree_list (NULL_TREE,
2739 make_tree (integer_type_node, const0_rtx));
2740 TREE_CHAIN (TREE_CHAIN (arg_list))
2741 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2742 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2744 /* Now we have to build up the CALL_EXPR itself. */
2745 call_expr = build1 (ADDR_EXPR,
2746 build_pointer_type (TREE_TYPE (block_clear_fn)),
2747 block_clear_fn);
2748 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (block_clear_fn)),
2749 call_expr, arg_list, NULL_TREE);
2750 TREE_SIDE_EFFECTS (call_expr) = 1;
2752 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2753 #else
2754 emit_library_call (bzero_libfunc, LCT_NORMAL,
2755 VOIDmode, 2, object, Pmode, size,
2756 TYPE_MODE (integer_type_node));
2757 #endif
2759 /* If we are initializing a readonly value, show the above call
2760 clobbered it. Otherwise, a load from it may erroneously be
2761 hoisted from a loop. */
2762 if (RTX_UNCHANGING_P (object))
2763 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2767 return retval;
2770 /* Generate code to copy Y into X.
2771 Both Y and X must have the same mode, except that
2772 Y can be a constant with VOIDmode.
2773 This mode cannot be BLKmode; use emit_block_move for that.
2775 Return the last instruction emitted. */
2778 emit_move_insn (x, y)
2779 rtx x, y;
2781 enum machine_mode mode = GET_MODE (x);
2782 rtx y_cst = NULL_RTX;
2783 rtx last_insn;
2785 x = protect_from_queue (x, 1);
2786 y = protect_from_queue (y, 0);
2788 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2789 abort ();
2791 /* Never force constant_p_rtx to memory. */
2792 if (GET_CODE (y) == CONSTANT_P_RTX)
2794 else if (CONSTANT_P (y))
2796 if (optimize
2797 && FLOAT_MODE_P (GET_MODE (x))
2798 && (last_insn = compress_float_constant (x, y)))
2799 return last_insn;
2801 if (!LEGITIMATE_CONSTANT_P (y))
2803 y_cst = y;
2804 y = force_const_mem (mode, y);
2808 /* If X or Y are memory references, verify that their addresses are valid
2809 for the machine. */
2810 if (GET_CODE (x) == MEM
2811 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2812 && ! push_operand (x, GET_MODE (x)))
2813 || (flag_force_addr
2814 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2815 x = validize_mem (x);
2817 if (GET_CODE (y) == MEM
2818 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2819 || (flag_force_addr
2820 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2821 y = validize_mem (y);
2823 if (mode == BLKmode)
2824 abort ();
2826 last_insn = emit_move_insn_1 (x, y);
2828 if (y_cst && GET_CODE (x) == REG)
2829 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2831 return last_insn;
2834 /* Low level part of emit_move_insn.
2835 Called just like emit_move_insn, but assumes X and Y
2836 are basically valid. */
2839 emit_move_insn_1 (x, y)
2840 rtx x, y;
2842 enum machine_mode mode = GET_MODE (x);
2843 enum machine_mode submode;
2844 enum mode_class class = GET_MODE_CLASS (mode);
2846 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2847 abort ();
2849 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2850 return
2851 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2853 /* Expand complex moves by moving real part and imag part, if possible. */
2854 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2855 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2856 * BITS_PER_UNIT),
2857 (class == MODE_COMPLEX_INT
2858 ? MODE_INT : MODE_FLOAT),
2860 && (mov_optab->handlers[(int) submode].insn_code
2861 != CODE_FOR_nothing))
2863 /* Don't split destination if it is a stack push. */
2864 int stack = push_operand (x, GET_MODE (x));
2866 #ifdef PUSH_ROUNDING
2867 /* In case we output to the stack, but the size is smaller than what the
2868 machine can push exactly, we need to use move instructions. */
2869 if (stack
2870 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2871 != GET_MODE_SIZE (submode)))
2873 rtx temp;
2874 HOST_WIDE_INT offset1, offset2;
2876 /* Do not use anti_adjust_stack, since we don't want to update
2877 stack_pointer_delta. */
2878 temp = expand_binop (Pmode,
2879 #ifdef STACK_GROWS_DOWNWARD
2880 sub_optab,
2881 #else
2882 add_optab,
2883 #endif
2884 stack_pointer_rtx,
2885 GEN_INT
2886 (PUSH_ROUNDING
2887 (GET_MODE_SIZE (GET_MODE (x)))),
2888 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2890 if (temp != stack_pointer_rtx)
2891 emit_move_insn (stack_pointer_rtx, temp);
2893 #ifdef STACK_GROWS_DOWNWARD
2894 offset1 = 0;
2895 offset2 = GET_MODE_SIZE (submode);
2896 #else
2897 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2898 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2899 + GET_MODE_SIZE (submode));
2900 #endif
2902 emit_move_insn (change_address (x, submode,
2903 gen_rtx_PLUS (Pmode,
2904 stack_pointer_rtx,
2905 GEN_INT (offset1))),
2906 gen_realpart (submode, y));
2907 emit_move_insn (change_address (x, submode,
2908 gen_rtx_PLUS (Pmode,
2909 stack_pointer_rtx,
2910 GEN_INT (offset2))),
2911 gen_imagpart (submode, y));
2913 else
2914 #endif
2915 /* If this is a stack push, push the highpart first, so it
2916 will be in the argument order.
2918 In that case, change_address is used only to convert
2919 the mode, not to change the address. */
2920 if (stack)
2922 /* Note that the real part always precedes the imag part in memory
2923 regardless of the machine's endianness. */
2924 #ifdef STACK_GROWS_DOWNWARD
2925 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2926 (gen_rtx_MEM (submode, XEXP (x, 0)),
2927 gen_imagpart (submode, y)));
2928 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2929 (gen_rtx_MEM (submode, XEXP (x, 0)),
2930 gen_realpart (submode, y)));
2931 #else
2932 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2933 (gen_rtx_MEM (submode, XEXP (x, 0)),
2934 gen_realpart (submode, y)));
2935 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2936 (gen_rtx_MEM (submode, XEXP (x, 0)),
2937 gen_imagpart (submode, y)));
2938 #endif
2940 else
2942 rtx realpart_x, realpart_y;
2943 rtx imagpart_x, imagpart_y;
2945 /* If this is a complex value with each part being smaller than a
2946 word, the usual calling sequence will likely pack the pieces into
2947 a single register. Unfortunately, SUBREG of hard registers only
2948 deals in terms of words, so we have a problem converting input
2949 arguments to the CONCAT of two registers that is used elsewhere
2950 for complex values. If this is before reload, we can copy it into
2951 memory and reload. FIXME, we should see about using extract and
2952 insert on integer registers, but complex short and complex char
2953 variables should be rarely used. */
2954 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2955 && (reload_in_progress | reload_completed) == 0)
2957 int packed_dest_p
2958 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2959 int packed_src_p
2960 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2962 if (packed_dest_p || packed_src_p)
2964 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2965 ? MODE_FLOAT : MODE_INT);
2967 enum machine_mode reg_mode
2968 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2970 if (reg_mode != BLKmode)
2972 rtx mem = assign_stack_temp (reg_mode,
2973 GET_MODE_SIZE (mode), 0);
2974 rtx cmem = adjust_address (mem, mode, 0);
2976 cfun->cannot_inline
2977 = N_("function using short complex types cannot be inline");
2979 if (packed_dest_p)
2981 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2983 emit_move_insn_1 (cmem, y);
2984 return emit_move_insn_1 (sreg, mem);
2986 else
2988 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2990 emit_move_insn_1 (mem, sreg);
2991 return emit_move_insn_1 (x, cmem);
2997 realpart_x = gen_realpart (submode, x);
2998 realpart_y = gen_realpart (submode, y);
2999 imagpart_x = gen_imagpart (submode, x);
3000 imagpart_y = gen_imagpart (submode, y);
3002 /* Show the output dies here. This is necessary for SUBREGs
3003 of pseudos since we cannot track their lifetimes correctly;
3004 hard regs shouldn't appear here except as return values.
3005 We never want to emit such a clobber after reload. */
3006 if (x != y
3007 && ! (reload_in_progress || reload_completed)
3008 && (GET_CODE (realpart_x) == SUBREG
3009 || GET_CODE (imagpart_x) == SUBREG))
3010 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3012 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3013 (realpart_x, realpart_y));
3014 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3015 (imagpart_x, imagpart_y));
3018 return get_last_insn ();
3021 /* This will handle any multi-word mode that lacks a move_insn pattern.
3022 However, you will get better code if you define such patterns,
3023 even if they must turn into multiple assembler instructions. */
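  /* For example, a move in a 16-byte integer mode on a 32-bit target that
     has no move pattern for that mode is expanded by the loop below into
     four word-sized moves of the corresponding operand_subwords.  */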
3024 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3026 rtx last_insn = 0;
3027 rtx seq, inner;
3028 int need_clobber;
3029 int i;
3031 #ifdef PUSH_ROUNDING
3033 /* If X is a push on the stack, do the push now and replace
3034 X with a reference to the stack pointer. */
3035 if (push_operand (x, GET_MODE (x)))
3037 rtx temp;
3038 enum rtx_code code;
3040 /* Do not use anti_adjust_stack, since we don't want to update
3041 stack_pointer_delta. */
3042 temp = expand_binop (Pmode,
3043 #ifdef STACK_GROWS_DOWNWARD
3044 sub_optab,
3045 #else
3046 add_optab,
3047 #endif
3048 stack_pointer_rtx,
3049 GEN_INT
3050 (PUSH_ROUNDING
3051 (GET_MODE_SIZE (GET_MODE (x)))),
3052 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3054 if (temp != stack_pointer_rtx)
3055 emit_move_insn (stack_pointer_rtx, temp);
3057 code = GET_CODE (XEXP (x, 0));
3059 /* Just hope that small offsets off SP are OK. */
3060 if (code == POST_INC)
3061 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3062 GEN_INT (-((HOST_WIDE_INT)
3063 GET_MODE_SIZE (GET_MODE (x)))));
3064 else if (code == POST_DEC)
3065 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3066 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3067 else
3068 temp = stack_pointer_rtx;
3070 x = change_address (x, VOIDmode, temp);
3072 #endif
3074 /* If we are in reload, see if either operand is a MEM whose address
3075 is scheduled for replacement. */
3076 if (reload_in_progress && GET_CODE (x) == MEM
3077 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3078 x = replace_equiv_address_nv (x, inner);
3079 if (reload_in_progress && GET_CODE (y) == MEM
3080 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3081 y = replace_equiv_address_nv (y, inner);
3083 start_sequence ();
3085 need_clobber = 0;
3086 for (i = 0;
3087 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3088 i++)
3090 rtx xpart = operand_subword (x, i, 1, mode);
3091 rtx ypart = operand_subword (y, i, 1, mode);
3093 /* If we can't get a part of Y, put Y into memory if it is a
3094 constant. Otherwise, force it into a register. If we still
3095 can't get a part of Y, abort. */
3096 if (ypart == 0 && CONSTANT_P (y))
3098 y = force_const_mem (mode, y);
3099 ypart = operand_subword (y, i, 1, mode);
3101 else if (ypart == 0)
3102 ypart = operand_subword_force (y, i, mode);
3104 if (xpart == 0 || ypart == 0)
3105 abort ();
3107 need_clobber |= (GET_CODE (xpart) == SUBREG);
3109 last_insn = emit_move_insn (xpart, ypart);
3112 seq = gen_sequence ();
3113 end_sequence ();
3115 /* Show the output dies here. This is necessary for SUBREGs
3116 of pseudos since we cannot track their lifetimes correctly;
3117 hard regs shouldn't appear here except as return values.
3118 We never want to emit such a clobber after reload. */
3119 if (x != y
3120 && ! (reload_in_progress || reload_completed)
3121 && need_clobber != 0)
3122 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3124 emit_insn (seq);
3126 return last_insn;
3128 else
3129 abort ();
3132 /* If Y is representable exactly in a narrower mode, and the target can
3133 perform the extension directly from constant or memory, then emit the
3134 move as an extension. */
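/* For example (assuming the target can extend SFmode to DFmode and accept
   the SFmode constant directly or from memory), a DFmode move of the
   constant 1.0, which truncates to SFmode exactly, can be emitted as a
   float_extend of the SFmode constant rather than materializing the full
   DFmode value.  */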
3136 static rtx
3137 compress_float_constant (x, y)
3138 rtx x, y;
3140 enum machine_mode dstmode = GET_MODE (x);
3141 enum machine_mode orig_srcmode = GET_MODE (y);
3142 enum machine_mode srcmode;
3143 REAL_VALUE_TYPE r;
3145 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3147 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3148 srcmode != orig_srcmode;
3149 srcmode = GET_MODE_WIDER_MODE (srcmode))
3151 enum insn_code ic;
3152 rtx trunc_y, last_insn;
3154 /* Skip if the target can't extend this way. */
3155 ic = can_extend_p (dstmode, srcmode, 0);
3156 if (ic == CODE_FOR_nothing)
3157 continue;
3159 /* Skip if the narrowed value isn't exact. */
3160 if (! exact_real_truncate (srcmode, &r))
3161 continue;
3163 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3165 if (LEGITIMATE_CONSTANT_P (trunc_y))
3167 /* Skip if the target needs extra instructions to perform
3168 the extension. */
3169 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3170 continue;
3172 else if (float_extend_from_mem[dstmode][srcmode])
3173 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3174 else
3175 continue;
3177 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3178 last_insn = get_last_insn ();
3180 if (GET_CODE (x) == REG)
3181 REG_NOTES (last_insn)
3182 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3184 return last_insn;
3187 return NULL_RTX;
3190 /* Pushing data onto the stack. */
3192 /* Push a block of length SIZE (perhaps variable)
3193 and return an rtx to address the beginning of the block.
3194 Note that it is not possible for the value returned to be a QUEUED.
3195 The value may be virtual_outgoing_args_rtx.
3197 EXTRA is the number of bytes of padding to push in addition to SIZE.
3198 BELOW nonzero means this padding comes at low addresses;
3199 otherwise, the padding comes at high addresses. */
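/* For instance, on a target whose stack grows downward and with a
   CONST_INT size, the stack is anti-adjusted by SIZE + EXTRA bytes and
   the returned address is virtual_outgoing_args_rtx, offset by EXTRA
   when BELOW is nonzero so that the padding is skipped.  */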
3202 push_block (size, extra, below)
3203 rtx size;
3204 int extra, below;
3206 rtx temp;
3208 size = convert_modes (Pmode, ptr_mode, size, 1);
3209 if (CONSTANT_P (size))
3210 anti_adjust_stack (plus_constant (size, extra));
3211 else if (GET_CODE (size) == REG && extra == 0)
3212 anti_adjust_stack (size);
3213 else
3215 temp = copy_to_mode_reg (Pmode, size);
3216 if (extra != 0)
3217 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3218 temp, 0, OPTAB_LIB_WIDEN);
3219 anti_adjust_stack (temp);
3222 #ifndef STACK_GROWS_DOWNWARD
3223 if (0)
3224 #else
3225 if (1)
3226 #endif
3228 temp = virtual_outgoing_args_rtx;
3229 if (extra != 0 && below)
3230 temp = plus_constant (temp, extra);
3232 else
3234 if (GET_CODE (size) == CONST_INT)
3235 temp = plus_constant (virtual_outgoing_args_rtx,
3236 -INTVAL (size) - (below ? 0 : extra));
3237 else if (extra != 0 && !below)
3238 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3239 negate_rtx (Pmode, plus_constant (size, extra)));
3240 else
3241 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3242 negate_rtx (Pmode, size));
3245 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3248 #ifdef PUSH_ROUNDING
3250 /* Emit single push insn. */
3252 static void
3253 emit_single_push_insn (mode, x, type)
3254 rtx x;
3255 enum machine_mode mode;
3256 tree type;
3258 rtx dest_addr;
3259 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3260 rtx dest;
3261 enum insn_code icode;
3262 insn_operand_predicate_fn pred;
3264 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3265 /* If there is a push pattern, use it. Otherwise fall back to the old way
3266 of handing a MEM that represents the push operation to the move expander. */
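  /* For instance (sizes hypothetical), pushing a QImode value on a target
     whose PUSH_ROUNDING rounds to 4 bytes and whose stack grows downward
     takes the fallback path below and stores through a MEM whose address is

	(pre_modify (reg sp) (plus (reg sp) (const_int -4)))

     so the move expander performs the stack adjustment and the store as a
     single move.  */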
3267 icode = push_optab->handlers[(int) mode].insn_code;
3268 if (icode != CODE_FOR_nothing)
3270 if (((pred = insn_data[(int) icode].operand[0].predicate)
3271 && !((*pred) (x, mode))))
3272 x = force_reg (mode, x);
3273 emit_insn (GEN_FCN (icode) (x));
3274 return;
3276 if (GET_MODE_SIZE (mode) == rounded_size)
3277 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3278 else
3280 #ifdef STACK_GROWS_DOWNWARD
3281 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3282 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3283 #else
3284 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3285 GEN_INT (rounded_size));
3286 #endif
3287 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3290 dest = gen_rtx_MEM (mode, dest_addr);
3292 if (type != 0)
3294 set_mem_attributes (dest, type, 1);
3296 if (flag_optimize_sibling_calls)
3297 /* Function incoming arguments may overlap with sibling call
3298 outgoing arguments and we cannot allow reordering of reads
3299 from function arguments with stores to outgoing arguments
3300 of sibling calls. */
3301 set_mem_alias_set (dest, 0);
3303 emit_move_insn (dest, x);
3305 #endif
3307 /* Generate code to push X onto the stack, assuming it has mode MODE and
3308 type TYPE.
3309 MODE is redundant except when X is a CONST_INT (since they don't
3310 carry mode info).
3311 SIZE is an rtx for the size of data to be copied (in bytes),
3312 needed only if X is BLKmode.
3314 ALIGN (in bits) is maximum alignment we can assume.
3316 If PARTIAL and REG are both nonzero, then copy that many of the first
3317 words of X into registers starting with REG, and push the rest of X.
3318 The amount of space pushed is decreased by PARTIAL words,
3319 rounded *down* to a multiple of PARM_BOUNDARY.
3320 REG must be a hard register in this case.
3321 If REG is zero but PARTIAL is not, take all other actions for an
3322 argument partially in registers, but do not actually load any
3323 registers.
3325 EXTRA is the amount in bytes of extra space to leave next to this arg.
3326 This is ignored if an argument block has already been allocated.
3328 On a machine that lacks real push insns, ARGS_ADDR is the address of
3329 the bottom of the argument block for this call. We use indexing off there
3330 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3331 argument block has not been preallocated.
3333 ARGS_SO_FAR is the size of args previously pushed for this call.
3335 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3336 for arguments passed in registers. If nonzero, it will be the number
3337 of bytes required. */
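/* A purely illustrative reading of PARTIAL (numbers hypothetical): with
   4-byte words, PARM_BOUNDARY == 64 and PARTIAL == 3, the first three
   words of X end up in REG, REG+1 and REG+2, and the stack space used
   for the argument shrinks by 3 words rounded down to the 64-bit
   boundary, i.e. by 8 bytes.  */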
3339 void
3340 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3341 args_addr, args_so_far, reg_parm_stack_space,
3342 alignment_pad)
3343 rtx x;
3344 enum machine_mode mode;
3345 tree type;
3346 rtx size;
3347 unsigned int align;
3348 int partial;
3349 rtx reg;
3350 int extra;
3351 rtx args_addr;
3352 rtx args_so_far;
3353 int reg_parm_stack_space;
3354 rtx alignment_pad;
3356 rtx xinner;
3357 enum direction stack_direction
3358 #ifdef STACK_GROWS_DOWNWARD
3359 = downward;
3360 #else
3361 = upward;
3362 #endif
3364 /* Decide where to pad the argument: `downward' for below,
3365 `upward' for above, or `none' for don't pad it.
3366 Default is below for small data on big-endian machines; else above. */
3367 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3369 /* Invert direction if stack is post-decrement.
3370 FIXME: why? */
3371 if (STACK_PUSH_CODE == POST_DEC)
3372 if (where_pad != none)
3373 where_pad = (where_pad == downward ? upward : downward);
3375 xinner = x = protect_from_queue (x, 0);
3377 if (mode == BLKmode)
3379 /* Copy a block into the stack, entirely or partially. */
3381 rtx temp;
3382 int used = partial * UNITS_PER_WORD;
3383 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3384 int skip;
3386 if (size == 0)
3387 abort ();
3389 used -= offset;
3391 /* USED is now the # of bytes we need not copy to the stack
3392 because registers will take care of them. */
3394 if (partial != 0)
3395 xinner = adjust_address (xinner, BLKmode, used);
3397 /* If the partial register-part of the arg counts in its stack size,
3398 skip the part of stack space corresponding to the registers.
3399 Otherwise, start copying to the beginning of the stack space,
3400 by setting SKIP to 0. */
3401 skip = (reg_parm_stack_space == 0) ? 0 : used;
3403 #ifdef PUSH_ROUNDING
3404 /* Do it with several push insns if that doesn't take lots of insns
3405 and if there is no difficulty with push insns that skip bytes
3406 on the stack for alignment purposes. */
3407 if (args_addr == 0
3408 && PUSH_ARGS
3409 && GET_CODE (size) == CONST_INT
3410 && skip == 0
3411 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3412 /* Here we avoid the case of a structure whose weak alignment
3413 forces many pushes of a small amount of data,
3414 and such small pushes do rounding that causes trouble. */
3415 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3416 || align >= BIGGEST_ALIGNMENT
3417 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3418 == (align / BITS_PER_UNIT)))
3419 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3421 /* Push padding now if padding above and stack grows down,
3422 or if padding below and stack grows up.
3423 But if space already allocated, this has already been done. */
3424 if (extra && args_addr == 0
3425 && where_pad != none && where_pad != stack_direction)
3426 anti_adjust_stack (GEN_INT (extra));
3428 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3430 else
3431 #endif /* PUSH_ROUNDING */
3433 rtx target;
3435 /* Otherwise make space on the stack and copy the data
3436 to the address of that space. */
3438 /* Deduct words put into registers from the size we must copy. */
3439 if (partial != 0)
3441 if (GET_CODE (size) == CONST_INT)
3442 size = GEN_INT (INTVAL (size) - used);
3443 else
3444 size = expand_binop (GET_MODE (size), sub_optab, size,
3445 GEN_INT (used), NULL_RTX, 0,
3446 OPTAB_LIB_WIDEN);
3449 /* Get the address of the stack space.
3450 In this case, we do not deal with EXTRA separately.
3451 A single stack adjust will do. */
3452 if (! args_addr)
3454 temp = push_block (size, extra, where_pad == downward);
3455 extra = 0;
3457 else if (GET_CODE (args_so_far) == CONST_INT)
3458 temp = memory_address (BLKmode,
3459 plus_constant (args_addr,
3460 skip + INTVAL (args_so_far)));
3461 else
3462 temp = memory_address (BLKmode,
3463 plus_constant (gen_rtx_PLUS (Pmode,
3464 args_addr,
3465 args_so_far),
3466 skip));
3467 target = gen_rtx_MEM (BLKmode, temp);
3469 if (type != 0)
3471 set_mem_attributes (target, type, 1);
3472 /* Function incoming arguments may overlap with sibling call
3473 outgoing arguments and we cannot allow reordering of reads
3474 from function arguments with stores to outgoing arguments
3475 of sibling calls. */
3476 set_mem_alias_set (target, 0);
3478 else
3479 set_mem_align (target, align);
3481 /* TEMP is the address of the block. Copy the data there. */
3482 if (GET_CODE (size) == CONST_INT
3483 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3485 move_by_pieces (target, xinner, INTVAL (size), align);
3486 goto ret;
3488 else
3490 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3491 enum machine_mode mode;
3493 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3494 mode != VOIDmode;
3495 mode = GET_MODE_WIDER_MODE (mode))
3497 enum insn_code code = movstr_optab[(int) mode];
3498 insn_operand_predicate_fn pred;
3500 if (code != CODE_FOR_nothing
3501 && ((GET_CODE (size) == CONST_INT
3502 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3503 <= (GET_MODE_MASK (mode) >> 1)))
3504 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3505 && (!(pred = insn_data[(int) code].operand[0].predicate)
3506 || ((*pred) (target, BLKmode)))
3507 && (!(pred = insn_data[(int) code].operand[1].predicate)
3508 || ((*pred) (xinner, BLKmode)))
3509 && (!(pred = insn_data[(int) code].operand[3].predicate)
3510 || ((*pred) (opalign, VOIDmode))))
3512 rtx op2 = convert_to_mode (mode, size, 1);
3513 rtx last = get_last_insn ();
3514 rtx pat;
3516 pred = insn_data[(int) code].operand[2].predicate;
3517 if (pred != 0 && ! (*pred) (op2, mode))
3518 op2 = copy_to_mode_reg (mode, op2);
3520 pat = GEN_FCN ((int) code) (target, xinner,
3521 op2, opalign);
3522 if (pat)
3524 emit_insn (pat);
3525 goto ret;
3527 else
3528 delete_insns_since (last);
3533 if (!ACCUMULATE_OUTGOING_ARGS)
3535 /* If the source is referenced relative to the stack pointer,
3536 copy it to another register to stabilize it. We do not need
3537 to do this if we know that we won't be changing sp. */
3539 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3540 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3541 temp = copy_to_reg (temp);
3544 /* Make inhibit_defer_pop nonzero around the library call
3545 to force it to pop the bcopy-arguments right away. */
3546 NO_DEFER_POP;
3547 #ifdef TARGET_MEM_FUNCTIONS
3548 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3549 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3550 convert_to_mode (TYPE_MODE (sizetype),
3551 size, TREE_UNSIGNED (sizetype)),
3552 TYPE_MODE (sizetype));
3553 #else
3554 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3555 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3556 convert_to_mode (TYPE_MODE (integer_type_node),
3557 size,
3558 TREE_UNSIGNED (integer_type_node)),
3559 TYPE_MODE (integer_type_node));
3560 #endif
3561 OK_DEFER_POP;
3564 else if (partial > 0)
3566 /* Scalar partly in registers. */
3568 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3569 int i;
3570 int not_stack;
3571 /* # words of start of argument
3572 that we must make space for but need not store. */
3573 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3574 int args_offset = INTVAL (args_so_far);
3575 int skip;
3577 /* Push padding now if padding above and stack grows down,
3578 or if padding below and stack grows up.
3579 But if space already allocated, this has already been done. */
3580 if (extra && args_addr == 0
3581 && where_pad != none && where_pad != stack_direction)
3582 anti_adjust_stack (GEN_INT (extra));
3584 /* If we make space by pushing it, we might as well push
3585 the real data. Otherwise, we can leave OFFSET nonzero
3586 and leave the space uninitialized. */
3587 if (args_addr == 0)
3588 offset = 0;
3590 /* Now NOT_STACK gets the number of words that we don't need to
3591 allocate on the stack. */
3592 not_stack = partial - offset;
3594 /* If the partial register-part of the arg counts in its stack size,
3595 skip the part of stack space corresponding to the registers.
3596 Otherwise, start copying to the beginning of the stack space,
3597 by setting SKIP to 0. */
3598 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3600 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3601 x = validize_mem (force_const_mem (mode, x));
3603 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3604 SUBREGs of such registers are not allowed. */
3605 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3606 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3607 x = copy_to_reg (x);
3609 /* Loop over all the words allocated on the stack for this arg. */
3610 /* We can do it by words, because any scalar bigger than a word
3611 has a size that is a multiple of a word. */
3612 #ifndef PUSH_ARGS_REVERSED
3613 for (i = not_stack; i < size; i++)
3614 #else
3615 for (i = size - 1; i >= not_stack; i--)
3616 #endif
3617 if (i >= not_stack + offset)
3618 emit_push_insn (operand_subword_force (x, i, mode),
3619 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3620 0, args_addr,
3621 GEN_INT (args_offset + ((i - not_stack + skip)
3622 * UNITS_PER_WORD)),
3623 reg_parm_stack_space, alignment_pad);
3625 else
3627 rtx addr;
3628 rtx target = NULL_RTX;
3629 rtx dest;
3631 /* Push padding now if padding above and stack grows down,
3632 or if padding below and stack grows up.
3633 But if space already allocated, this has already been done. */
3634 if (extra && args_addr == 0
3635 && where_pad != none && where_pad != stack_direction)
3636 anti_adjust_stack (GEN_INT (extra));
3638 #ifdef PUSH_ROUNDING
3639 if (args_addr == 0 && PUSH_ARGS)
3640 emit_single_push_insn (mode, x, type);
3641 else
3642 #endif
3644 if (GET_CODE (args_so_far) == CONST_INT)
3645 addr
3646 = memory_address (mode,
3647 plus_constant (args_addr,
3648 INTVAL (args_so_far)));
3649 else
3650 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3651 args_so_far));
3652 target = addr;
3653 dest = gen_rtx_MEM (mode, addr);
3654 if (type != 0)
3656 set_mem_attributes (dest, type, 1);
3657 /* Function incoming arguments may overlap with sibling call
3658 outgoing arguments and we cannot allow reordering of reads
3659 from function arguments with stores to outgoing arguments
3660 of sibling calls. */
3661 set_mem_alias_set (dest, 0);
3664 emit_move_insn (dest, x);
3669 ret:
3670 /* If part should go in registers, copy that part
3671 into the appropriate registers. Do this now, at the end,
3672 since mem-to-mem copies above may do function calls. */
3673 if (partial > 0 && reg != 0)
3675 /* Handle calls that pass values in multiple non-contiguous locations.
3676 The Irix 6 ABI has examples of this. */
3677 if (GET_CODE (reg) == PARALLEL)
3678 emit_group_load (reg, x, -1); /* ??? size? */
3679 else
3680 move_block_to_reg (REGNO (reg), x, partial, mode);
3683 if (extra && args_addr == 0 && where_pad == stack_direction)
3684 anti_adjust_stack (GEN_INT (extra));
3686 if (alignment_pad && args_addr == 0)
3687 anti_adjust_stack (alignment_pad);
3690 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3691 operations. */
3693 static rtx
3694 get_subtarget (x)
3695 rtx x;
3697 return ((x == 0
3698 /* Only registers can be subtargets. */
3699 || GET_CODE (x) != REG
3700 /* If the register is readonly, it can't be set more than once. */
3701 || RTX_UNCHANGING_P (x)
3702 /* Don't use hard regs to avoid extending their life. */
3703 || REGNO (x) < FIRST_PSEUDO_REGISTER
3704 /* Avoid subtargets inside loops,
3705 since they hide some invariant expressions. */
3706 || preserve_subexpressions_p ())
3707 ? 0 : x);
3710 /* Expand an assignment that stores the value of FROM into TO.
3711 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3712 (This may contain a QUEUED rtx;
3713 if the value is constant, this rtx is a constant.)
3714 Otherwise, the returned value is NULL_RTX.
3716 SUGGEST_REG is no longer actually used.
3717 It used to mean, copy the value through a register
3718 and return that register, if that is possible.
3719 We now use WANT_VALUE to decide whether to do this. */
3722 expand_assignment (to, from, want_value, suggest_reg)
3723 tree to, from;
3724 int want_value;
3725 int suggest_reg ATTRIBUTE_UNUSED;
3727 rtx to_rtx = 0;
3728 rtx result;
3730 /* Don't crash if the lhs of the assignment was erroneous. */
3732 if (TREE_CODE (to) == ERROR_MARK)
3734 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3735 return want_value ? result : NULL_RTX;
3738 /* Assignment of a structure component needs special treatment
3739 if the structure component's rtx is not simply a MEM.
3740 Assignment of an array element at a constant index, and assignment of
3741 an array element in an unaligned packed structure field, have the same
3742 problem. */
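/* For instance, stores such as s.f = x where f is a bit-field, or
   a[i] = x where the array lives inside a packed structure, take this
   path: get_inner_reference analyzes the reference and store_field does
   the actual store.  */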
3744 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3745 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3747 enum machine_mode mode1;
3748 HOST_WIDE_INT bitsize, bitpos;
3749 rtx orig_to_rtx;
3750 tree offset;
3751 int unsignedp;
3752 int volatilep = 0;
3753 tree tem;
3755 push_temp_slots ();
3756 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3757 &unsignedp, &volatilep);
3759 /* If we are going to use store_bit_field and extract_bit_field,
3760 make sure to_rtx will be safe for multiple use. */
3762 if (mode1 == VOIDmode && want_value)
3763 tem = stabilize_reference (tem);
3765 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3767 if (offset != 0)
3769 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3771 if (GET_CODE (to_rtx) != MEM)
3772 abort ();
3774 #ifdef POINTERS_EXTEND_UNSIGNED
3775 if (GET_MODE (offset_rtx) != Pmode)
3776 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3777 #else
3778 if (GET_MODE (offset_rtx) != ptr_mode)
3779 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3780 #endif
3782 /* A constant address in TO_RTX can have VOIDmode; we must not try
3783 to call force_reg for that case, so avoid it. */
3784 if (GET_CODE (to_rtx) == MEM
3785 && GET_MODE (to_rtx) == BLKmode
3786 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3787 && bitsize > 0
3788 && (bitpos % bitsize) == 0
3789 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3790 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3792 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3793 bitpos = 0;
3796 to_rtx = offset_address (to_rtx, offset_rtx,
3797 highest_pow2_factor_for_type (TREE_TYPE (to),
3798 offset));
3801 if (GET_CODE (to_rtx) == MEM)
3803 tree old_expr = MEM_EXPR (to_rtx);
3805 /* If the field is at offset zero, we could have been given the
3806 DECL_RTX of the parent struct. Don't munge it. */
3807 to_rtx = shallow_copy_rtx (to_rtx);
3809 set_mem_attributes (to_rtx, to, 0);
3811 /* If we changed MEM_EXPR, that means we're now referencing
3812 the COMPONENT_REF, which means that MEM_OFFSET must be
3813 relative to that field. But we've not yet reflected BITPOS
3814 in TO_RTX. This will be done in store_field. Adjust for
3815 that by biasing MEM_OFFSET by -bitpos. */
3816 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3817 && (bitpos / BITS_PER_UNIT) != 0)
3818 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3819 - (bitpos / BITS_PER_UNIT)));
3822 /* Deal with volatile and readonly fields. The former is only done
3823 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3824 if (volatilep && GET_CODE (to_rtx) == MEM)
3826 if (to_rtx == orig_to_rtx)
3827 to_rtx = copy_rtx (to_rtx);
3828 MEM_VOLATILE_P (to_rtx) = 1;
3831 if (TREE_CODE (to) == COMPONENT_REF
3832 && TREE_READONLY (TREE_OPERAND (to, 1)))
3834 if (to_rtx == orig_to_rtx)
3835 to_rtx = copy_rtx (to_rtx);
3836 RTX_UNCHANGING_P (to_rtx) = 1;
3839 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3841 if (to_rtx == orig_to_rtx)
3842 to_rtx = copy_rtx (to_rtx);
3843 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3846 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3847 (want_value
3848 /* Spurious cast for HPUX compiler. */
3849 ? ((enum machine_mode)
3850 TYPE_MODE (TREE_TYPE (to)))
3851 : VOIDmode),
3852 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3854 preserve_temp_slots (result);
3855 free_temp_slots ();
3856 pop_temp_slots ();
3858 /* If the value is meaningful, convert RESULT to the proper mode.
3859 Otherwise, return nothing. */
3860 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3861 TYPE_MODE (TREE_TYPE (from)),
3862 result,
3863 TREE_UNSIGNED (TREE_TYPE (to)))
3864 : NULL_RTX);
3867 /* If the rhs is a function call and its value is not an aggregate,
3868 call the function before we start to compute the lhs.
3869 This is needed for correct code for cases such as
3870 val = setjmp (buf) on machines where reference to val
3871 requires loading up part of an address in a separate insn.
3873 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3874 since it might be a promoted variable where the zero- or sign-extension
3875 needs to be done. Handling this in the normal way is safe because no
3876 computation is done before the call. */
3877 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3878 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3879 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3880 && GET_CODE (DECL_RTL (to)) == REG))
3882 rtx value;
3884 push_temp_slots ();
3885 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3886 if (to_rtx == 0)
3887 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3889 /* Handle calls that return values in multiple non-contiguous locations.
3890 The Irix 6 ABI has examples of this. */
3891 if (GET_CODE (to_rtx) == PARALLEL)
3892 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3893 else if (GET_MODE (to_rtx) == BLKmode)
3894 emit_block_move (to_rtx, value, expr_size (from));
3895 else
3897 #ifdef POINTERS_EXTEND_UNSIGNED
3898 if (POINTER_TYPE_P (TREE_TYPE (to))
3899 && GET_MODE (to_rtx) != GET_MODE (value))
3900 value = convert_memory_address (GET_MODE (to_rtx), value);
3901 #endif
3902 emit_move_insn (to_rtx, value);
3904 preserve_temp_slots (to_rtx);
3905 free_temp_slots ();
3906 pop_temp_slots ();
3907 return want_value ? to_rtx : NULL_RTX;
3910 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3911 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3913 if (to_rtx == 0)
3914 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3916 /* Don't move directly into a return register. */
3917 if (TREE_CODE (to) == RESULT_DECL
3918 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3920 rtx temp;
3922 push_temp_slots ();
3923 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3925 if (GET_CODE (to_rtx) == PARALLEL)
3926 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3927 else
3928 emit_move_insn (to_rtx, temp);
3930 preserve_temp_slots (to_rtx);
3931 free_temp_slots ();
3932 pop_temp_slots ();
3933 return want_value ? to_rtx : NULL_RTX;
3936 /* In case we are returning the contents of an object which overlaps
3937 the place the value is being stored, use a safe function when copying
3938 a value through a pointer into a structure value return block. */
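  /* A hypothetical source-level instance of the overlap handled here:

	struct S f (struct S *p) { return *p; }
	...
	obj = f (&obj);

     The return block given to f may be `obj' itself, so a plain block
     move from *p could read bytes it has already stored; memmove (or
     bcopy) below copes with the overlap.  */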
3939 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3940 && current_function_returns_struct
3941 && !current_function_returns_pcc_struct)
3943 rtx from_rtx, size;
3945 push_temp_slots ();
3946 size = expr_size (from);
3947 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3949 #ifdef TARGET_MEM_FUNCTIONS
3950 emit_library_call (memmove_libfunc, LCT_NORMAL,
3951 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3952 XEXP (from_rtx, 0), Pmode,
3953 convert_to_mode (TYPE_MODE (sizetype),
3954 size, TREE_UNSIGNED (sizetype)),
3955 TYPE_MODE (sizetype));
3956 #else
3957 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3958 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3959 XEXP (to_rtx, 0), Pmode,
3960 convert_to_mode (TYPE_MODE (integer_type_node),
3961 size, TREE_UNSIGNED (integer_type_node)),
3962 TYPE_MODE (integer_type_node));
3963 #endif
3965 preserve_temp_slots (to_rtx);
3966 free_temp_slots ();
3967 pop_temp_slots ();
3968 return want_value ? to_rtx : NULL_RTX;
3971 /* Compute FROM and store the value in the rtx we got. */
3973 push_temp_slots ();
3974 result = store_expr (from, to_rtx, want_value);
3975 preserve_temp_slots (result);
3976 free_temp_slots ();
3977 pop_temp_slots ();
3978 return want_value ? result : NULL_RTX;
3981 /* Generate code for computing expression EXP,
3982 and storing the value into TARGET.
3983 TARGET may contain a QUEUED rtx.
3985 If WANT_VALUE is nonzero, return a copy of the value
3986 not in TARGET, so that we can be sure to use the proper
3987 value in a containing expression even if TARGET has something
3988 else stored in it. If possible, we copy the value through a pseudo
3989 and return that pseudo. Or, if the value is constant, we try to
3990 return the constant. In some cases, we return a pseudo
3991 copied *from* TARGET.
3993 If the mode is BLKmode then we may return TARGET itself.
3994 It turns out that in BLKmode it doesn't cause a problem,
3995 because C has no operators that could combine two different
3996 assignments into the same BLKmode object with different values
3997 with no sequence point. Will other languages need this to
3998 be more thorough?
4000 If WANT_VALUE is 0, we return NULL, to make sure
4001 to catch quickly any cases where the caller uses the value
4002 and fails to set WANT_VALUE. */
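/* Illustrative note (not part of the original interface comment): when
   the value of an assignment is itself needed, as for the inner
   assignment in

	a = (b = c);

   the caller passes WANT_VALUE nonzero and gets back an rtx holding the
   stored value; a plain statement-level `b = c;' passes WANT_VALUE == 0
   and gets NULL_RTX.  */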
4005 store_expr (exp, target, want_value)
4006 tree exp;
4007 rtx target;
4008 int want_value;
4010 rtx temp;
4011 int dont_return_target = 0;
4012 int dont_store_target = 0;
4014 if (TREE_CODE (exp) == COMPOUND_EXPR)
4016 /* Perform first part of compound expression, then assign from second
4017 part. */
4018 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4019 emit_queue ();
4020 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4022 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4024 /* For conditional expression, get safe form of the target. Then
4025 test the condition, doing the appropriate assignment on either
4026 side. This avoids the creation of unnecessary temporaries.
4027 For non-BLKmode, it is more efficient not to do this. */
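  /* E.g. (illustrative), for a structure-sized assignment such as

	s = flag ? s1 : s2;

     we jump on `flag' and store the chosen arm directly into `s',
     rather than first building the result in a temporary and then
     copying that temporary into `s'.  */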
4029 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4031 emit_queue ();
4032 target = protect_from_queue (target, 1);
4034 do_pending_stack_adjust ();
4035 NO_DEFER_POP;
4036 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4037 start_cleanup_deferral ();
4038 store_expr (TREE_OPERAND (exp, 1), target, 0);
4039 end_cleanup_deferral ();
4040 emit_queue ();
4041 emit_jump_insn (gen_jump (lab2));
4042 emit_barrier ();
4043 emit_label (lab1);
4044 start_cleanup_deferral ();
4045 store_expr (TREE_OPERAND (exp, 2), target, 0);
4046 end_cleanup_deferral ();
4047 emit_queue ();
4048 emit_label (lab2);
4049 OK_DEFER_POP;
4051 return want_value ? target : NULL_RTX;
4053 else if (queued_subexp_p (target))
4054 /* If target contains a postincrement, let's not risk
4055 using it as the place to generate the rhs. */
4057 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4059 /* Expand EXP into a new pseudo. */
4060 temp = gen_reg_rtx (GET_MODE (target));
4061 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4063 else
4064 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4066 /* If target is volatile, ANSI requires accessing the value
4067 *from* the target, if it is accessed. So make that happen.
4068 In no case return the target itself. */
4069 if (! MEM_VOLATILE_P (target) && want_value)
4070 dont_return_target = 1;
4072 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4073 && GET_MODE (target) != BLKmode)
4074 /* If target is in memory and caller wants value in a register instead,
4075 arrange that. Pass TARGET as target for expand_expr so that,
4076 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4077 We know expand_expr will not use the target in that case.
4078 Don't do this if TARGET is volatile because we are supposed
4079 to write it and then read it. */
4081 temp = expand_expr (exp, target, GET_MODE (target), 0);
4082 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4084 /* If TEMP is already in the desired TARGET, only copy it from
4085 memory and don't store it there again. */
4086 if (temp == target
4087 || (rtx_equal_p (temp, target)
4088 && ! side_effects_p (temp) && ! side_effects_p (target)))
4089 dont_store_target = 1;
4090 temp = copy_to_reg (temp);
4092 dont_return_target = 1;
4094 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4095 /* If this is a scalar in a register that is stored in a wider mode
4096 than the declared mode, compute the result into its declared mode
4097 and then convert to the wider mode. Our value is the computed
4098 expression. */
4100 rtx inner_target = 0;
4102 /* If we don't want a value, we can do the conversion inside EXP,
4103 which will often result in some optimizations. Do the conversion
4104 in two steps: first change the signedness, if needed, then
4105 the extend. But don't do this if the type of EXP is a subtype
4106 of something else since then the conversion might involve
4107 more than just converting modes. */
4108 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4109 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4111 if (TREE_UNSIGNED (TREE_TYPE (exp))
4112 != SUBREG_PROMOTED_UNSIGNED_P (target))
4113 exp = convert
4114 ((*lang_hooks.types.signed_or_unsigned_type)
4115 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4117 exp = convert ((*lang_hooks.types.type_for_mode)
4118 (GET_MODE (SUBREG_REG (target)),
4119 SUBREG_PROMOTED_UNSIGNED_P (target)),
4120 exp);
4122 inner_target = SUBREG_REG (target);
4125 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4127 /* If TEMP is a volatile MEM and we want a result value, make
4128 the access now so it gets done only once. Likewise if
4129 it contains TARGET. */
4130 if (GET_CODE (temp) == MEM && want_value
4131 && (MEM_VOLATILE_P (temp)
4132 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4133 temp = copy_to_reg (temp);
4135 /* If TEMP is a VOIDmode constant, use convert_modes to make
4136 sure that we properly convert it. */
4137 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4139 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4140 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4141 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4142 GET_MODE (target), temp,
4143 SUBREG_PROMOTED_UNSIGNED_P (target));
4146 convert_move (SUBREG_REG (target), temp,
4147 SUBREG_PROMOTED_UNSIGNED_P (target));
4149 /* If we promoted a constant, change the mode back down to match
4150 target. Otherwise, the caller might get confused by a result whose
4151 mode is larger than expected. */
4153 if (want_value && GET_MODE (temp) != GET_MODE (target))
4155 if (GET_MODE (temp) != VOIDmode)
4157 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4158 SUBREG_PROMOTED_VAR_P (temp) = 1;
4159 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4160 SUBREG_PROMOTED_UNSIGNED_P (target));
4162 else
4163 temp = convert_modes (GET_MODE (target),
4164 GET_MODE (SUBREG_REG (target)),
4165 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4168 return want_value ? temp : NULL_RTX;
4170 else
4172 temp = expand_expr (exp, target, GET_MODE (target), 0);
4173 /* Return TARGET if it's a specified hardware register.
4174 If TARGET is a volatile mem ref, either return TARGET
4175 or return a reg copied *from* TARGET; ANSI requires this.
4177 Otherwise, if TEMP is not TARGET, return TEMP
4178 if it is constant (for efficiency),
4179 or if we really want the correct value. */
4180 if (!(target && GET_CODE (target) == REG
4181 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4182 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4183 && ! rtx_equal_p (temp, target)
4184 && (CONSTANT_P (temp) || want_value))
4185 dont_return_target = 1;
4188 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4189 the same as that of TARGET, adjust the constant. This is needed, for
4190 example, in case it is a CONST_DOUBLE and we want only a word-sized
4191 value. */
4192 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4193 && TREE_CODE (exp) != ERROR_MARK
4194 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4195 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4196 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4198 /* If value was not generated in the target, store it there.
4199 Convert the value to TARGET's type first if necessary.
4200 If TEMP and TARGET compare equal according to rtx_equal_p, but
4201 one or both of them are volatile memory refs, we have to distinguish
4202 two cases:
4203 - expand_expr has used TARGET. In this case, we must not generate
4204 another copy. This can be detected by TARGET being equal according
4205 to == .
4206 - expand_expr has not used TARGET - that means that the source just
4207 happens to have the same RTX form. Since temp will have been created
4208 by expand_expr, it will compare unequal according to == .
4209 We must generate a copy in this case, to reach the correct number
4210 of volatile memory references. */
4212 if ((! rtx_equal_p (temp, target)
4213 || (temp != target && (side_effects_p (temp)
4214 || side_effects_p (target))))
4215 && TREE_CODE (exp) != ERROR_MARK
4216 && ! dont_store_target
4217 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4218 but TARGET is not a valid memory reference, TEMP will differ
4219 from TARGET although it is really the same location. */
4220 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4221 || target != DECL_RTL_IF_SET (exp)))
4223 target = protect_from_queue (target, 1);
4224 if (GET_MODE (temp) != GET_MODE (target)
4225 && GET_MODE (temp) != VOIDmode)
4227 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4228 if (dont_return_target)
4230 /* In this case, we will return TEMP,
4231 so make sure it has the proper mode.
4232 But don't forget to store the value into TARGET. */
4233 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4234 emit_move_insn (target, temp);
4236 else
4237 convert_move (target, temp, unsignedp);
4240 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4242 /* Handle copying a string constant into an array. The string
4243 constant may be shorter than the array. So copy just the string's
4244 actual length, and clear the rest. First get the size of the data
4245 type of the string, which is actually the size of the target. */
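	  /* Illustrative case (assumed): for `char buf[8] = "hi";' the
	     STRING_CST provides 3 bytes (including the terminating NUL),
	     which are copied below, and the remaining 5 bytes of BUF are
	     then cleared.  */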
4246 rtx size = expr_size (exp);
4248 if (GET_CODE (size) == CONST_INT
4249 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4250 emit_block_move (target, temp, size);
4251 else
4253 /* Compute the size of the data to copy from the string. */
4254 tree copy_size
4255 = size_binop (MIN_EXPR,
4256 make_tree (sizetype, size),
4257 size_int (TREE_STRING_LENGTH (exp)));
4258 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4259 VOIDmode, 0);
4260 rtx label = 0;
4262 /* Copy that much. */
4263 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4264 emit_block_move (target, temp, copy_size_rtx);
4266 /* Figure out how much is left in TARGET that we have to clear.
4267 Do all calculations in ptr_mode. */
4268 if (GET_CODE (copy_size_rtx) == CONST_INT)
4270 size = plus_constant (size, -INTVAL (copy_size_rtx));
4271 target = adjust_address (target, BLKmode,
4272 INTVAL (copy_size_rtx));
4274 else
4276 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4277 copy_size_rtx, NULL_RTX, 0,
4278 OPTAB_LIB_WIDEN);
4280 #ifdef POINTERS_EXTEND_UNSIGNED
4281 if (GET_MODE (copy_size_rtx) != Pmode)
4282 copy_size_rtx = convert_memory_address (Pmode,
4283 copy_size_rtx);
4284 #endif
4286 target = offset_address (target, copy_size_rtx,
4287 highest_pow2_factor (copy_size));
4288 label = gen_label_rtx ();
4289 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4290 GET_MODE (size), 0, label);
4293 if (size != const0_rtx)
4294 clear_storage (target, size);
4296 if (label)
4297 emit_label (label);
4300 /* Handle calls that return values in multiple non-contiguous locations.
4301 The Irix 6 ABI has examples of this. */
4302 else if (GET_CODE (target) == PARALLEL)
4303 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4304 else if (GET_MODE (temp) == BLKmode)
4305 emit_block_move (target, temp, expr_size (exp));
4306 else
4307 emit_move_insn (target, temp);
4310 /* If we don't want a value, return NULL_RTX. */
4311 if (! want_value)
4312 return NULL_RTX;
4314 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4315 ??? The latter test doesn't seem to make sense. */
4316 else if (dont_return_target && GET_CODE (temp) != MEM)
4317 return temp;
4319 /* Return TARGET itself if it is a hard register. */
4320 else if (want_value && GET_MODE (target) != BLKmode
4321 && ! (GET_CODE (target) == REG
4322 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4323 return copy_to_reg (target);
4325 else
4326 return target;
4329 /* Return 1 if EXP just contains zeros. */
4331 static int
4332 is_zeros_p (exp)
4333 tree exp;
4335 tree elt;
4337 switch (TREE_CODE (exp))
4339 case CONVERT_EXPR:
4340 case NOP_EXPR:
4341 case NON_LVALUE_EXPR:
4342 case VIEW_CONVERT_EXPR:
4343 return is_zeros_p (TREE_OPERAND (exp, 0));
4345 case INTEGER_CST:
4346 return integer_zerop (exp);
4348 case COMPLEX_CST:
4349 return
4350 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4352 case REAL_CST:
4353 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4355 case VECTOR_CST:
4356 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4357 elt = TREE_CHAIN (elt))
4358 if (!is_zeros_p (TREE_VALUE (elt)))
4359 return 0;
4361 return 1;
4363 case CONSTRUCTOR:
4364 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4365 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4366 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4367 if (! is_zeros_p (TREE_VALUE (elt)))
4368 return 0;
4370 return 1;
4372 default:
4373 return 0;
4377 /* Return 1 if EXP contains mostly (3/4) zeros. */
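/* Purely illustrative example: the initializer

	int v[8] = { 1, 0, 0, 0, 0, 0, 0, 2 };

   has 6 zero elements out of 8, and 4*6 >= 3*8, so this returns 1;
   store_constructor will then clear the array once and store only the
   two nonzero elements.  */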
4379 static int
4380 mostly_zeros_p (exp)
4381 tree exp;
4383 if (TREE_CODE (exp) == CONSTRUCTOR)
4385 int elts = 0, zeros = 0;
4386 tree elt = CONSTRUCTOR_ELTS (exp);
4387 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4389 /* If there are no ranges of true bits, it is all zero. */
4390 return elt == NULL_TREE;
4392 for (; elt; elt = TREE_CHAIN (elt))
4394 /* We do not handle the case where the index is a RANGE_EXPR,
4395 so the statistic will be somewhat inaccurate.
4396 We do make a more accurate count in store_constructor itself,
4397 and since this function is only used for nested array elements,
4398 this should be close enough. */
4399 if (mostly_zeros_p (TREE_VALUE (elt)))
4400 zeros++;
4401 elts++;
4404 return 4 * zeros >= 3 * elts;
4407 return is_zeros_p (exp);
4410 /* Helper function for store_constructor.
4411 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4412 TYPE is the type of the CONSTRUCTOR, not the element type.
4413 CLEARED is as for store_constructor.
4414 ALIAS_SET is the alias set to use for any stores.
4416 This provides a recursive shortcut back to store_constructor when it isn't
4417 necessary to go through store_field. This is so that we can pass through
4418 the cleared field to let store_constructor know that we may not have to
4419 clear a substructure if the outer structure has already been cleared. */
4421 static void
4422 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4423 alias_set)
4424 rtx target;
4425 unsigned HOST_WIDE_INT bitsize;
4426 HOST_WIDE_INT bitpos;
4427 enum machine_mode mode;
4428 tree exp, type;
4429 int cleared;
4430 int alias_set;
4432 if (TREE_CODE (exp) == CONSTRUCTOR
4433 && bitpos % BITS_PER_UNIT == 0
4434 /* If we have a non-zero bitpos for a register target, then we just
4435 let store_field do the bitfield handling. This is unlikely to
4436 generate unnecessary clear instructions anyway. */
4437 && (bitpos == 0 || GET_CODE (target) == MEM))
4439 if (GET_CODE (target) == MEM)
4440 target
4441 = adjust_address (target,
4442 GET_MODE (target) == BLKmode
4443 || 0 != (bitpos
4444 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4445 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4448 /* Update the alias set, if required. */
4449 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4450 && MEM_ALIAS_SET (target) != 0)
4452 target = copy_rtx (target);
4453 set_mem_alias_set (target, alias_set);
4456 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4458 else
4459 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4460 alias_set);
4463 /* Store the value of constructor EXP into the rtx TARGET.
4464 TARGET is either a REG or a MEM; we know it cannot conflict, since
4465 safe_from_p has been called.
4466 CLEARED is true if TARGET is known to have been zero'd.
4467 SIZE is the number of bytes of TARGET we are allowed to modify: this
4468 may not be the same as the size of EXP if we are assigning to a field
4469 which has been packed to exclude padding bits. */
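/* Rough illustration (assumed, not from the original comment): for

	struct P { char tag; int payload; } p = { 0, 0 };

   the whole object can be cleared with one clear_storage call and the
   individual zero fields skipped, whereas a mostly-nonzero initializer
   stores each field separately through store_constructor_field.  */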
4471 static void
4472 store_constructor (exp, target, cleared, size)
4473 tree exp;
4474 rtx target;
4475 int cleared;
4476 HOST_WIDE_INT size;
4478 tree type = TREE_TYPE (exp);
4479 #ifdef WORD_REGISTER_OPERATIONS
4480 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4481 #endif
4483 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4484 || TREE_CODE (type) == QUAL_UNION_TYPE)
4486 tree elt;
4488 /* We either clear the aggregate or indicate the value is dead. */
4489 if ((TREE_CODE (type) == UNION_TYPE
4490 || TREE_CODE (type) == QUAL_UNION_TYPE)
4491 && ! cleared
4492 && ! CONSTRUCTOR_ELTS (exp))
4493 /* If the constructor is empty, clear the union. */
4495 clear_storage (target, expr_size (exp));
4496 cleared = 1;
4499 /* If we are building a static constructor into a register,
4500 set the initial value as zero so we can fold the value into
4501 a constant. But if more than one register is involved,
4502 this probably loses. */
4503 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4504 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4506 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4507 cleared = 1;
4510 /* If the constructor has fewer fields than the structure
4511 or if we are initializing the structure to mostly zeros,
4512 clear the whole structure first. Don't do this if TARGET is a
4513 register whose mode size isn't equal to SIZE since clear_storage
4514 can't handle this case. */
4515 else if (! cleared && size > 0
4516 && ((list_length (CONSTRUCTOR_ELTS (exp))
4517 != fields_length (type))
4518 || mostly_zeros_p (exp))
4519 && (GET_CODE (target) != REG
4520 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4521 == size)))
4523 clear_storage (target, GEN_INT (size));
4524 cleared = 1;
4527 if (! cleared)
4528 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4530 /* Store each element of the constructor into
4531 the corresponding field of TARGET. */
4533 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4535 tree field = TREE_PURPOSE (elt);
4536 tree value = TREE_VALUE (elt);
4537 enum machine_mode mode;
4538 HOST_WIDE_INT bitsize;
4539 HOST_WIDE_INT bitpos = 0;
4540 int unsignedp;
4541 tree offset;
4542 rtx to_rtx = target;
4544 /* Just ignore missing fields.
4545 We cleared the whole structure, above,
4546 if any fields are missing. */
4547 if (field == 0)
4548 continue;
4550 if (cleared && is_zeros_p (value))
4551 continue;
4553 if (host_integerp (DECL_SIZE (field), 1))
4554 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4555 else
4556 bitsize = -1;
4558 unsignedp = TREE_UNSIGNED (field);
4559 mode = DECL_MODE (field);
4560 if (DECL_BIT_FIELD (field))
4561 mode = VOIDmode;
4563 offset = DECL_FIELD_OFFSET (field);
4564 if (host_integerp (offset, 0)
4565 && host_integerp (bit_position (field), 0))
4567 bitpos = int_bit_position (field);
4568 offset = 0;
4570 else
4571 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4573 if (offset)
4575 rtx offset_rtx;
4577 if (contains_placeholder_p (offset))
4578 offset = build (WITH_RECORD_EXPR, sizetype,
4579 offset, make_tree (TREE_TYPE (exp), target));
4581 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4582 if (GET_CODE (to_rtx) != MEM)
4583 abort ();
4585 #ifdef POINTERS_EXTEND_UNSIGNED
4586 if (GET_MODE (offset_rtx) != Pmode)
4587 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4588 #else
4589 if (GET_MODE (offset_rtx) != ptr_mode)
4590 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4591 #endif
4593 to_rtx = offset_address (to_rtx, offset_rtx,
4594 highest_pow2_factor (offset));
4597 if (TREE_READONLY (field))
4599 if (GET_CODE (to_rtx) == MEM)
4600 to_rtx = copy_rtx (to_rtx);
4602 RTX_UNCHANGING_P (to_rtx) = 1;
4605 #ifdef WORD_REGISTER_OPERATIONS
4606 /* If this initializes a field that is smaller than a word, at the
4607 start of a word, try to widen it to a full word.
4608 This special case allows us to output C++ member function
4609 initializations in a form that the optimizers can understand. */
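	  /* Conceptual sketch, assuming a 32-bit big-endian word: storing
	     the constant 5 into a 16-bit field at bit 0 of a register-held
	     structure becomes a word_mode store of (5 << 16), so later
	     word-oriented passes see one full-word constant.  */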
4610 if (GET_CODE (target) == REG
4611 && bitsize < BITS_PER_WORD
4612 && bitpos % BITS_PER_WORD == 0
4613 && GET_MODE_CLASS (mode) == MODE_INT
4614 && TREE_CODE (value) == INTEGER_CST
4615 && exp_size >= 0
4616 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4618 tree type = TREE_TYPE (value);
4620 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4622 type = (*lang_hooks.types.type_for_size)
4623 (BITS_PER_WORD, TREE_UNSIGNED (type));
4624 value = convert (type, value);
4627 if (BYTES_BIG_ENDIAN)
4628 value
4629 = fold (build (LSHIFT_EXPR, type, value,
4630 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4631 bitsize = BITS_PER_WORD;
4632 mode = word_mode;
4634 #endif
4636 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4637 && DECL_NONADDRESSABLE_P (field))
4639 to_rtx = copy_rtx (to_rtx);
4640 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4643 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4644 value, type, cleared,
4645 get_alias_set (TREE_TYPE (field)));
4648 else if (TREE_CODE (type) == ARRAY_TYPE
4649 || TREE_CODE (type) == VECTOR_TYPE)
4651 tree elt;
4652 int i;
4653 int need_to_clear;
4654 tree domain = TYPE_DOMAIN (type);
4655 tree elttype = TREE_TYPE (type);
4656 int const_bounds_p;
4657 HOST_WIDE_INT minelt = 0;
4658 HOST_WIDE_INT maxelt = 0;
4660 /* Vectors are like arrays, but the domain is stored via an array
4661 type indirectly. */
4662 if (TREE_CODE (type) == VECTOR_TYPE)
4664 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4665 the same field as TYPE_DOMAIN, we are not guaranteed that
4666 it always will. */
4667 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4668 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4671 const_bounds_p = (TYPE_MIN_VALUE (domain)
4672 && TYPE_MAX_VALUE (domain)
4673 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4674 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4676 /* If we have constant bounds for the range of the type, get them. */
4677 if (const_bounds_p)
4679 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4680 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4683 /* If the constructor has fewer elements than the array,
4684 clear the whole array first. Similarly if this is
4685 a static constructor of a non-BLKmode object. */
4686 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4687 need_to_clear = 1;
4688 else
4690 HOST_WIDE_INT count = 0, zero_count = 0;
4691 need_to_clear = ! const_bounds_p;
4693 /* This loop is a more accurate version of the loop in
4694 mostly_zeros_p (it handles RANGE_EXPR in an index).
4695 It is also needed to check for missing elements. */
4696 for (elt = CONSTRUCTOR_ELTS (exp);
4697 elt != NULL_TREE && ! need_to_clear;
4698 elt = TREE_CHAIN (elt))
4700 tree index = TREE_PURPOSE (elt);
4701 HOST_WIDE_INT this_node_count;
4703 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4705 tree lo_index = TREE_OPERAND (index, 0);
4706 tree hi_index = TREE_OPERAND (index, 1);
4708 if (! host_integerp (lo_index, 1)
4709 || ! host_integerp (hi_index, 1))
4711 need_to_clear = 1;
4712 break;
4715 this_node_count = (tree_low_cst (hi_index, 1)
4716 - tree_low_cst (lo_index, 1) + 1);
4718 else
4719 this_node_count = 1;
4721 count += this_node_count;
4722 if (mostly_zeros_p (TREE_VALUE (elt)))
4723 zero_count += this_node_count;
4726 /* Clear the entire array first if there are any missing elements,
4727 or if the incidence of zero elements is >= 75%. */
4728 if (! need_to_clear
4729 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4730 need_to_clear = 1;
4733 if (need_to_clear && size > 0)
4735 if (! cleared)
4737 if (REG_P (target))
4738 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4739 else
4740 clear_storage (target, GEN_INT (size));
4742 cleared = 1;
4744 else if (REG_P (target))
4745 /* Inform later passes that the old value is dead. */
4746 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4748 /* Store each element of the constructor into
4749 the corresponding element of TARGET, determined
4750 by counting the elements. */
4751 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4752 elt;
4753 elt = TREE_CHAIN (elt), i++)
4755 enum machine_mode mode;
4756 HOST_WIDE_INT bitsize;
4757 HOST_WIDE_INT bitpos;
4758 int unsignedp;
4759 tree value = TREE_VALUE (elt);
4760 tree index = TREE_PURPOSE (elt);
4761 rtx xtarget = target;
4763 if (cleared && is_zeros_p (value))
4764 continue;
4766 unsignedp = TREE_UNSIGNED (elttype);
4767 mode = TYPE_MODE (elttype);
4768 if (mode == BLKmode)
4769 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4770 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4771 : -1);
4772 else
4773 bitsize = GET_MODE_BITSIZE (mode);
4775 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4777 tree lo_index = TREE_OPERAND (index, 0);
4778 tree hi_index = TREE_OPERAND (index, 1);
4779 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4780 struct nesting *loop;
4781 HOST_WIDE_INT lo, hi, count;
4782 tree position;
4784 /* If the range is constant and "small", unroll the loop. */
4785 if (const_bounds_p
4786 && host_integerp (lo_index, 0)
4787 && host_integerp (hi_index, 0)
4788 && (lo = tree_low_cst (lo_index, 0),
4789 hi = tree_low_cst (hi_index, 0),
4790 count = hi - lo + 1,
4791 (GET_CODE (target) != MEM
4792 || count <= 2
4793 || (host_integerp (TYPE_SIZE (elttype), 1)
4794 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4795 <= 40 * 8)))))
4797 lo -= minelt; hi -= minelt;
4798 for (; lo <= hi; lo++)
4800 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4802 if (GET_CODE (target) == MEM
4803 && !MEM_KEEP_ALIAS_SET_P (target)
4804 && TREE_CODE (type) == ARRAY_TYPE
4805 && TYPE_NONALIASED_COMPONENT (type))
4807 target = copy_rtx (target);
4808 MEM_KEEP_ALIAS_SET_P (target) = 1;
4811 store_constructor_field
4812 (target, bitsize, bitpos, mode, value, type, cleared,
4813 get_alias_set (elttype));
4816 else
4818 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4819 loop_top = gen_label_rtx ();
4820 loop_end = gen_label_rtx ();
4822 unsignedp = TREE_UNSIGNED (domain);
4824 index = build_decl (VAR_DECL, NULL_TREE, domain);
4826 index_r
4827 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4828 &unsignedp, 0));
4829 SET_DECL_RTL (index, index_r);
4830 if (TREE_CODE (value) == SAVE_EXPR
4831 && SAVE_EXPR_RTL (value) == 0)
4833 /* Make sure value gets expanded once before the
4834 loop. */
4835 expand_expr (value, const0_rtx, VOIDmode, 0);
4836 emit_queue ();
4838 store_expr (lo_index, index_r, 0);
4839 loop = expand_start_loop (0);
4841 /* Assign value to element index. */
4842 position
4843 = convert (ssizetype,
4844 fold (build (MINUS_EXPR, TREE_TYPE (index),
4845 index, TYPE_MIN_VALUE (domain))));
4846 position = size_binop (MULT_EXPR, position,
4847 convert (ssizetype,
4848 TYPE_SIZE_UNIT (elttype)));
4850 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4851 xtarget = offset_address (target, pos_rtx,
4852 highest_pow2_factor (position));
4853 xtarget = adjust_address (xtarget, mode, 0);
4854 if (TREE_CODE (value) == CONSTRUCTOR)
4855 store_constructor (value, xtarget, cleared,
4856 bitsize / BITS_PER_UNIT);
4857 else
4858 store_expr (value, xtarget, 0);
4860 expand_exit_loop_if_false (loop,
4861 build (LT_EXPR, integer_type_node,
4862 index, hi_index));
4864 expand_increment (build (PREINCREMENT_EXPR,
4865 TREE_TYPE (index),
4866 index, integer_one_node), 0, 0);
4867 expand_end_loop ();
4868 emit_label (loop_end);
4871 else if ((index != 0 && ! host_integerp (index, 0))
4872 || ! host_integerp (TYPE_SIZE (elttype), 1))
4874 tree position;
4876 if (index == 0)
4877 index = ssize_int (1);
4879 if (minelt)
4880 index = convert (ssizetype,
4881 fold (build (MINUS_EXPR, index,
4882 TYPE_MIN_VALUE (domain))));
4884 position = size_binop (MULT_EXPR, index,
4885 convert (ssizetype,
4886 TYPE_SIZE_UNIT (elttype)));
4887 xtarget = offset_address (target,
4888 expand_expr (position, 0, VOIDmode, 0),
4889 highest_pow2_factor (position));
4890 xtarget = adjust_address (xtarget, mode, 0);
4891 store_expr (value, xtarget, 0);
4893 else
4895 if (index != 0)
4896 bitpos = ((tree_low_cst (index, 0) - minelt)
4897 * tree_low_cst (TYPE_SIZE (elttype), 1));
4898 else
4899 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4901 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4902 && TREE_CODE (type) == ARRAY_TYPE
4903 && TYPE_NONALIASED_COMPONENT (type))
4905 target = copy_rtx (target);
4906 MEM_KEEP_ALIAS_SET_P (target) = 1;
4909 store_constructor_field (target, bitsize, bitpos, mode, value,
4910 type, cleared, get_alias_set (elttype));
4916 /* Set constructor assignments. */
4917 else if (TREE_CODE (type) == SET_TYPE)
4919 tree elt = CONSTRUCTOR_ELTS (exp);
4920 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4921 tree domain = TYPE_DOMAIN (type);
4922 tree domain_min, domain_max, bitlength;
4924 /* The default implementation strategy is to extract the constant
4925 parts of the constructor, use that to initialize the target,
4926 and then "or" in whatever non-constant ranges we need in addition.
4928 If a large set is all zero or all ones, it is
4929 probably better to set it using memset (if available) or bzero.
4930 Also, if a large set has just a single range, it may also be
4931 better to first clear the whole set (using
4932 bzero/memset), and then set the bits we want. */
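  /* For illustration only (assuming TARGET_MEM_FUNCTIONS and the usual
     thresholds): a 256-bit set whose constructor holds the single range
     [8 .. 199] is handled below by clearing all 32 bytes with
     clear_storage and then memset-ing the 24 byte-aligned bytes of the
     range to all ones; ranges that are not byte aligned go through the
     __setbits library call instead.  */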
4934 /* Check for all zeros. */
4935 if (elt == NULL_TREE && size > 0)
4937 if (!cleared)
4938 clear_storage (target, GEN_INT (size));
4939 return;
4942 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4943 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4944 bitlength = size_binop (PLUS_EXPR,
4945 size_diffop (domain_max, domain_min),
4946 ssize_int (1));
4948 nbits = tree_low_cst (bitlength, 1);
4950 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4951 are "complicated" (more than one range), initialize (the
4952 constant parts) by copying from a constant. */
4953 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4954 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4956 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4957 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4958 char *bit_buffer = (char *) alloca (nbits);
4959 HOST_WIDE_INT word = 0;
4960 unsigned int bit_pos = 0;
4961 unsigned int ibit = 0;
4962 unsigned int offset = 0; /* In bytes from beginning of set. */
4964 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4965 for (;;)
4967 if (bit_buffer[ibit])
4969 if (BYTES_BIG_ENDIAN)
4970 word |= (1 << (set_word_size - 1 - bit_pos));
4971 else
4972 word |= 1 << bit_pos;
4975 bit_pos++; ibit++;
4976 if (bit_pos >= set_word_size || ibit == nbits)
4978 if (word != 0 || ! cleared)
4980 rtx datum = GEN_INT (word);
4981 rtx to_rtx;
4983 /* The assumption here is that it is safe to use
4984 XEXP if the set is multi-word, but not if
4985 it's single-word. */
4986 if (GET_CODE (target) == MEM)
4987 to_rtx = adjust_address (target, mode, offset);
4988 else if (offset == 0)
4989 to_rtx = target;
4990 else
4991 abort ();
4992 emit_move_insn (to_rtx, datum);
4995 if (ibit == nbits)
4996 break;
4997 word = 0;
4998 bit_pos = 0;
4999 offset += set_word_size / BITS_PER_UNIT;
5003 else if (!cleared)
5004 /* Don't bother clearing storage if the set is all ones. */
5005 if (TREE_CHAIN (elt) != NULL_TREE
5006 || (TREE_PURPOSE (elt) == NULL_TREE
5007 ? nbits != 1
5008 : ( ! host_integerp (TREE_VALUE (elt), 0)
5009 || ! host_integerp (TREE_PURPOSE (elt), 0)
5010 || (tree_low_cst (TREE_VALUE (elt), 0)
5011 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5012 != (HOST_WIDE_INT) nbits))))
5013 clear_storage (target, expr_size (exp));
5015 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5017 /* Start of range of element or NULL. */
5018 tree startbit = TREE_PURPOSE (elt);
5019 /* End of range of element, or element value. */
5020 tree endbit = TREE_VALUE (elt);
5021 #ifdef TARGET_MEM_FUNCTIONS
5022 HOST_WIDE_INT startb, endb;
5023 #endif
5024 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5026 bitlength_rtx = expand_expr (bitlength,
5027 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5029 /* Handle non-range tuple element like [ expr ]. */
5030 if (startbit == NULL_TREE)
5032 startbit = save_expr (endbit);
5033 endbit = startbit;
5036 startbit = convert (sizetype, startbit);
5037 endbit = convert (sizetype, endbit);
5038 if (! integer_zerop (domain_min))
5040 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5041 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5043 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5044 EXPAND_CONST_ADDRESS);
5045 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5046 EXPAND_CONST_ADDRESS);
5048 if (REG_P (target))
5050 targetx
5051 = assign_temp
5052 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5053 (GET_MODE (target), 0),
5054 TYPE_QUAL_CONST)),
5055 0, 1, 1);
5056 emit_move_insn (targetx, target);
5059 else if (GET_CODE (target) == MEM)
5060 targetx = target;
5061 else
5062 abort ();
5064 #ifdef TARGET_MEM_FUNCTIONS
5065 /* Optimization: If startbit and endbit are
5066 constants divisible by BITS_PER_UNIT,
5067 call memset instead. */
5068 if (TREE_CODE (startbit) == INTEGER_CST
5069 && TREE_CODE (endbit) == INTEGER_CST
5070 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5071 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5073 emit_library_call (memset_libfunc, LCT_NORMAL,
5074 VOIDmode, 3,
5075 plus_constant (XEXP (targetx, 0),
5076 startb / BITS_PER_UNIT),
5077 Pmode,
5078 constm1_rtx, TYPE_MODE (integer_type_node),
5079 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5080 TYPE_MODE (sizetype));
5082 else
5083 #endif
5084 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5085 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5086 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5087 startbit_rtx, TYPE_MODE (sizetype),
5088 endbit_rtx, TYPE_MODE (sizetype));
5090 if (REG_P (target))
5091 emit_move_insn (target, targetx);
5095 else
5096 abort ();
5099 /* Store the value of EXP (an expression tree)
5100 into a subfield of TARGET which has mode MODE and occupies
5101 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5102 If MODE is VOIDmode, it means that we are storing into a bit-field.
5104 If VALUE_MODE is VOIDmode, return nothing in particular.
5105 UNSIGNEDP is not used in this case.
5107 Otherwise, return an rtx for the value stored. This rtx
5108 has mode VALUE_MODE if that is convenient to do.
5109 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5111 TYPE is the type of the underlying object,
5113 ALIAS_SET is the alias set for the destination. This value will
5114 (in general) be different from that for TARGET, since TARGET is a
5115 reference to the containing structure. */
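/* Hypothetical example of a path into this function: for

	struct { unsigned int f : 5; } x;
	x.f = v;

   expand_assignment finds BITSIZE = 5, BITPOS = 0 and, because the field
   is a bit-field, MODE = VOIDmode, so the store goes through
   store_bit_field below.  */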
5117 static rtx
5118 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5119 alias_set)
5120 rtx target;
5121 HOST_WIDE_INT bitsize;
5122 HOST_WIDE_INT bitpos;
5123 enum machine_mode mode;
5124 tree exp;
5125 enum machine_mode value_mode;
5126 int unsignedp;
5127 tree type;
5128 int alias_set;
5130 HOST_WIDE_INT width_mask = 0;
5132 if (TREE_CODE (exp) == ERROR_MARK)
5133 return const0_rtx;
5135 /* If we have nothing to store, do nothing unless the expression has
5136 side-effects. */
5137 if (bitsize == 0)
5138 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5139 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5140 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5142 /* If we are storing into an unaligned field of an aligned union that is
5143 in a register, we may have the mode of TARGET being an integer mode but
5144 MODE == BLKmode. In that case, get an aligned object whose size and
5145 alignment are the same as TARGET and store TARGET into it (we can avoid
5146 the store if the field being stored is the entire width of TARGET). Then
5147 call ourselves recursively to store the field into a BLKmode version of
5148 that object. Finally, load from the object into TARGET. This is not
5149 very efficient in general, but should only be slightly more expensive
5150 than the otherwise-required unaligned accesses. Perhaps this can be
5151 cleaned up later. */
5153 if (mode == BLKmode
5154 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5156 rtx object
5157 = assign_temp
5158 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5159 0, 1, 1);
5160 rtx blk_object = adjust_address (object, BLKmode, 0);
5162 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5163 emit_move_insn (object, target);
5165 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5166 alias_set);
5168 emit_move_insn (target, object);
5170 /* We want to return the BLKmode version of the data. */
5171 return blk_object;
5174 if (GET_CODE (target) == CONCAT)
5176 /* We're storing into a struct containing a single __complex. */
5178 if (bitpos != 0)
5179 abort ();
5180 return store_expr (exp, target, 0);
5183 /* If the structure is in a register or if the component
5184 is a bit field, we cannot use addressing to access it.
5185 Use bit-field techniques or SUBREG to store in it. */
5187 if (mode == VOIDmode
5188 || (mode != BLKmode && ! direct_store[(int) mode]
5189 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5190 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5191 || GET_CODE (target) == REG
5192 || GET_CODE (target) == SUBREG
5193 /* If the field isn't aligned enough to store as an ordinary memref,
5194 store it as a bit field. */
5195 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5196 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5197 || bitpos % GET_MODE_ALIGNMENT (mode)))
5198 /* If the RHS and field are a constant size and the size of the
5199 RHS isn't the same size as the bitfield, we must use bitfield
5200 operations. */
5201 || (bitsize >= 0
5202 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5203 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5205 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5207 /* If BITSIZE is narrower than the size of the type of EXP
5208 we will be narrowing TEMP. Normally, what's wanted are the
5209 low-order bits. However, if EXP's type is a record and this is a
5210 big-endian machine, we want the upper BITSIZE bits. */
5211 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5212 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5213 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5214 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5215 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5216 - bitsize),
5217 temp, 1);
5219 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5220 MODE. */
5221 if (mode != VOIDmode && mode != BLKmode
5222 && mode != TYPE_MODE (TREE_TYPE (exp)))
5223 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5225 /* If the modes of TARGET and TEMP are both BLKmode, both
5226 must be in memory and BITPOS must be aligned on a byte
5227 boundary. If so, we simply do a block copy. */
5228 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5230 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5231 || bitpos % BITS_PER_UNIT != 0)
5232 abort ();
5234 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5235 emit_block_move (target, temp,
5236 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5237 / BITS_PER_UNIT));
5239 return value_mode == VOIDmode ? const0_rtx : target;
5242 /* Store the value in the bitfield. */
5243 store_bit_field (target, bitsize, bitpos, mode, temp,
5244 int_size_in_bytes (type));
5246 if (value_mode != VOIDmode)
5248 /* The caller wants an rtx for the value.
5249 If possible, avoid refetching from the bitfield itself. */
5250 if (width_mask != 0
5251 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5253 tree count;
5254 enum machine_mode tmode;
5256 tmode = GET_MODE (temp);
5257 if (tmode == VOIDmode)
5258 tmode = value_mode;
5260 if (unsignedp)
5261 return expand_and (tmode, temp,
5262 gen_int_mode (width_mask, tmode),
5263 NULL_RTX);
5265 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5266 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5267 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5270 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5271 NULL_RTX, value_mode, VOIDmode,
5272 int_size_in_bytes (type));
5274 return const0_rtx;
5276 else
5278 rtx addr = XEXP (target, 0);
5279 rtx to_rtx = target;
5281 /* If a value is wanted, it must be the lhs;
5282 so make the address stable for multiple use. */
5284 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5285 && ! CONSTANT_ADDRESS_P (addr)
5286 /* A frame-pointer reference is already stable. */
5287 && ! (GET_CODE (addr) == PLUS
5288 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5289 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5290 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5291 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5293 /* Now build a reference to just the desired component. */
5295 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5297 if (to_rtx == target)
5298 to_rtx = copy_rtx (to_rtx);
5300 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5301 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5302 set_mem_alias_set (to_rtx, alias_set);
5304 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5308 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5309 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5310 codes and find the ultimate containing object, which we return.
5312 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5313 bit position, and *PUNSIGNEDP to the signedness of the field.
5314 If the position of the field is variable, we store a tree
5315 giving the variable offset (in units) in *POFFSET.
5316 This offset is in addition to the bit position.
5317 If the position is not variable, we store 0 in *POFFSET.
5319 If any of the extraction expressions is volatile,
5320 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5322 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5323 is a mode that can be used to access the field. In that case, *PBITSIZE
5324 is redundant.
5326 If the field describes a variable-sized object, *PMODE is set to
5327 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5328 this case, but the address of the object can be found. */
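/* Worked example (layout assumptions: 32-bit int, 16-bit short): for

	struct S { int a; short b; } s;
	... s.b ...

   this returns the VAR_DECL for `s' with *PBITSIZE = 16, *PBITPOS = 32,
   *POFFSET = 0, *PMODE = HImode and *PUNSIGNEDP = 0.  */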
5330 tree
5331 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5332 punsignedp, pvolatilep)
5333 tree exp;
5334 HOST_WIDE_INT *pbitsize;
5335 HOST_WIDE_INT *pbitpos;
5336 tree *poffset;
5337 enum machine_mode *pmode;
5338 int *punsignedp;
5339 int *pvolatilep;
5341 tree size_tree = 0;
5342 enum machine_mode mode = VOIDmode;
5343 tree offset = size_zero_node;
5344 tree bit_offset = bitsize_zero_node;
5345 tree placeholder_ptr = 0;
5346 tree tem;
5348 /* First get the mode, signedness, and size. We do this from just the
5349 outermost expression. */
5350 if (TREE_CODE (exp) == COMPONENT_REF)
5352 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5353 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5354 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5356 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5358 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5360 size_tree = TREE_OPERAND (exp, 1);
5361 *punsignedp = TREE_UNSIGNED (exp);
5363 else
5365 mode = TYPE_MODE (TREE_TYPE (exp));
5366 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5368 if (mode == BLKmode)
5369 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5370 else
5371 *pbitsize = GET_MODE_BITSIZE (mode);
5374 if (size_tree != 0)
5376 if (! host_integerp (size_tree, 1))
5377 mode = BLKmode, *pbitsize = -1;
5378 else
5379 *pbitsize = tree_low_cst (size_tree, 1);
5382 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5383 and find the ultimate containing object. */
5384 while (1)
5386 if (TREE_CODE (exp) == BIT_FIELD_REF)
5387 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5388 else if (TREE_CODE (exp) == COMPONENT_REF)
5390 tree field = TREE_OPERAND (exp, 1);
5391 tree this_offset = DECL_FIELD_OFFSET (field);
5393 /* If this field hasn't been filled in yet, don't go
5394 past it. This should only happen when folding expressions
5395 made during type construction. */
5396 if (this_offset == 0)
5397 break;
5398 else if (! TREE_CONSTANT (this_offset)
5399 && contains_placeholder_p (this_offset))
5400 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5402 offset = size_binop (PLUS_EXPR, offset, this_offset);
5403 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5404 DECL_FIELD_BIT_OFFSET (field));
5406 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5409 else if (TREE_CODE (exp) == ARRAY_REF
5410 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5412 tree index = TREE_OPERAND (exp, 1);
5413 tree array = TREE_OPERAND (exp, 0);
5414 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5415 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5416 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5418 /* We assume all arrays have sizes that are a multiple of a byte.
5419 First subtract the lower bound, if any, in the type of the
5420 index, then convert to sizetype and multiply by the size of the
5421 array element. */
5422 if (low_bound != 0 && ! integer_zerop (low_bound))
5423 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5424 index, low_bound));
5426 /* If the index has a self-referential type, pass it to a
5427 WITH_RECORD_EXPR; if the component size is self-referential,
5428 pass our component to one. */
5429 if (! TREE_CONSTANT (index)
5430 && contains_placeholder_p (index))
5431 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5432 if (! TREE_CONSTANT (unit_size)
5433 && contains_placeholder_p (unit_size))
5434 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5436 offset = size_binop (PLUS_EXPR, offset,
5437 size_binop (MULT_EXPR,
5438 convert (sizetype, index),
5439 unit_size));
5442 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5444 tree new = find_placeholder (exp, &placeholder_ptr);
5446 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5447 We might have been called from tree optimization where we
5448 haven't set up an object yet. */
5449 if (new == 0)
5450 break;
5451 else
5452 exp = new;
5454 continue;
5456 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5457 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5458 && ! ((TREE_CODE (exp) == NOP_EXPR
5459 || TREE_CODE (exp) == CONVERT_EXPR)
5460 && (TYPE_MODE (TREE_TYPE (exp))
5461 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5462 break;
5464 /* If any reference in the chain is volatile, the effect is volatile. */
5465 if (TREE_THIS_VOLATILE (exp))
5466 *pvolatilep = 1;
5468 exp = TREE_OPERAND (exp, 0);
5471 /* If OFFSET is constant, see if we can return the whole thing as a
5472 constant bit position. Otherwise, split it up. */
5473 if (host_integerp (offset, 0)
5474 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5475 bitsize_unit_node))
5476 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5477 && host_integerp (tem, 0))
5478 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5479 else
5480 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5482 *pmode = mode;
5483 return exp;
5486 /* Return 1 if T is an expression that get_inner_reference handles. */
5489 handled_component_p (t)
5490 tree t;
5492 switch (TREE_CODE (t))
5494 case BIT_FIELD_REF:
5495 case COMPONENT_REF:
5496 case ARRAY_REF:
5497 case ARRAY_RANGE_REF:
5498 case NON_LVALUE_EXPR:
5499 case VIEW_CONVERT_EXPR:
5500 return 1;
5502 case NOP_EXPR:
5503 case CONVERT_EXPR:
5504 return (TYPE_MODE (TREE_TYPE (t))
5505 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5507 default:
5508 return 0;
5512 /* Given an rtx VALUE that may contain additions and multiplications, return
5513 an equivalent value that just refers to a register, memory, or constant.
5514 This is done by generating instructions to perform the arithmetic and
5515 returning a pseudo-register containing the value.
5517 The returned value may be a REG, SUBREG, MEM or constant. */
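/* Sketch (not authoritative): given

	(plus:SI (mult:SI (reg:SI 60) (const_int 4)) (reg:SI 61))

   this emits a multiplication (typically a shift, for a power of two)
   followed by an addition and returns the pseudo holding the sum, so
   the caller is left with a plain register operand.  */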
5520 force_operand (value, target)
5521 rtx value, target;
5523 rtx op1, op2;
5524 /* Use subtarget as the target for operand 0 of a binary operation. */
5525 rtx subtarget = get_subtarget (target);
5526 enum rtx_code code = GET_CODE (value);
5528 /* Check for a PIC address load. */
5529 if ((code == PLUS || code == MINUS)
5530 && XEXP (value, 0) == pic_offset_table_rtx
5531 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5532 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5533 || GET_CODE (XEXP (value, 1)) == CONST))
5535 if (!subtarget)
5536 subtarget = gen_reg_rtx (GET_MODE (value));
5537 emit_move_insn (subtarget, value);
5538 return subtarget;
5541 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5543 if (!target)
5544 target = gen_reg_rtx (GET_MODE (value));
5545 convert_move (target, force_operand (XEXP (value, 0), NULL),
5546 code == ZERO_EXTEND);
5547 return target;
5550 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5552 op2 = XEXP (value, 1);
5553 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5554 subtarget = 0;
5555 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5557 code = PLUS;
5558 op2 = negate_rtx (GET_MODE (value), op2);
5561 /* Check for an addition with OP2 a constant integer and our first
5562 operand a PLUS of a virtual register and something else. In that
5563 case, we want to emit the sum of the virtual register and the
5564 constant first and then add the other value. This allows virtual
5565 register instantiation to simply modify the constant rather than
5566 creating another one around this addition. */
5567 if (code == PLUS && GET_CODE (op2) == CONST_INT
5568 && GET_CODE (XEXP (value, 0)) == PLUS
5569 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5570 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5571 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5573 rtx temp = expand_simple_binop (GET_MODE (value), code,
5574 XEXP (XEXP (value, 0), 0), op2,
5575 subtarget, 0, OPTAB_LIB_WIDEN);
5576 return expand_simple_binop (GET_MODE (value), code, temp,
5577 force_operand (XEXP (XEXP (value,
5578 0), 1), 0),
5579 target, 0, OPTAB_LIB_WIDEN);
5582 op1 = force_operand (XEXP (value, 0), subtarget);
5583 op2 = force_operand (op2, NULL_RTX);
5584 switch (code)
5586 case MULT:
5587 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5588 case DIV:
5589 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5590 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5591 target, 1, OPTAB_LIB_WIDEN);
5592 else
5593 return expand_divmod (0,
5594 FLOAT_MODE_P (GET_MODE (value))
5595 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5596 GET_MODE (value), op1, op2, target, 0);
5597 break;
5598 case MOD:
5599 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5600 target, 0);
5601 break;
5602 case UDIV:
5603 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5604 target, 1);
5605 break;
5606 case UMOD:
5607 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5608 target, 1);
5609 break;
5610 case ASHIFTRT:
5611 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5612 target, 0, OPTAB_LIB_WIDEN);
5613 break;
5614 default:
5615 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5616 target, 1, OPTAB_LIB_WIDEN);
5619 if (GET_RTX_CLASS (code) == '1')
5621 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5622 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5625 #ifdef INSN_SCHEDULING
5626 /* On machines that have insn scheduling, we want all memory references to be
5627 explicit, so we need to deal with such paradoxical SUBREGs. */
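/* For instance (modes chosen only for illustration), a paradoxical
     (subreg:DI (mem:SI ...) 0)
   is rewritten below by loading the SImode MEM into a fresh register
   and taking the DImode subreg of that register, so the memory access
   becomes an explicit insn.  */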
5628 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5629 && (GET_MODE_SIZE (GET_MODE (value))
5630 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5631 value
5632 = simplify_gen_subreg (GET_MODE (value),
5633 force_reg (GET_MODE (SUBREG_REG (value)),
5634 force_operand (SUBREG_REG (value),
5635 NULL_RTX)),
5636 GET_MODE (SUBREG_REG (value)),
5637 SUBREG_BYTE (value));
5638 #endif
5640 return value;
5643 /* Subroutine of expand_expr: return nonzero iff there is no way that
5644 EXP can reference X, which is being modified. TOP_P is nonzero if this
5645 call is going to be used to determine whether we need a temporary
5646 for EXP, as opposed to a recursive call to this function.
5648 It is always safe for this routine to return zero since it merely
5649 searches for optimization opportunities. */
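/* A typical use appears later in this file: expand_expr asks
   safe_from_p (target, exp, 1) before letting store_constructor write
   a CONSTRUCTOR directly into TARGET, and falls back to a temporary
   when the answer is zero.  */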
5652 safe_from_p (x, exp, top_p)
5653 rtx x;
5654 tree exp;
5655 int top_p;
5657 rtx exp_rtl = 0;
5658 int i, nops;
5659 static tree save_expr_list;
5661 if (x == 0
5662 /* If EXP has varying size, we MUST use a target since we currently
5663 have no way of allocating temporaries of variable size
5664 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5665 So we assume here that something at a higher level has prevented a
5666 clash. This is somewhat bogus, but the best we can do. Only
5667 do this when X is BLKmode and when we are at the top level. */
5668 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5669 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5670 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5671 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5672 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5673 != INTEGER_CST)
5674 && GET_MODE (x) == BLKmode)
5675 /* If X is in the outgoing argument area, it is always safe. */
5676 || (GET_CODE (x) == MEM
5677 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5678 || (GET_CODE (XEXP (x, 0)) == PLUS
5679 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5680 return 1;
5682 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5683 find the underlying pseudo. */
5684 if (GET_CODE (x) == SUBREG)
5686 x = SUBREG_REG (x);
5687 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5688 return 0;
5691 /* A SAVE_EXPR might appear many times in the expression passed to the
5692 top-level safe_from_p call, and if it has a complex subexpression,
5693 examining it multiple times could result in a combinatorial explosion.
5694 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5695 with optimization took about 28 minutes to compile -- even though it was
5696 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5697 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5698 we have processed. Note that the only test of top_p was above. */
5700 if (top_p)
5702 int rtn;
5703 tree t;
5705 save_expr_list = 0;
5707 rtn = safe_from_p (x, exp, 0);
5709 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5710 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5712 return rtn;
5715 /* Now look at our tree code and possibly recurse. */
5716 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5718 case 'd':
5719 exp_rtl = DECL_RTL_IF_SET (exp);
5720 break;
5722 case 'c':
5723 return 1;
5725 case 'x':
5726 if (TREE_CODE (exp) == TREE_LIST)
5727 return ((TREE_VALUE (exp) == 0
5728 || safe_from_p (x, TREE_VALUE (exp), 0))
5729 && (TREE_CHAIN (exp) == 0
5730 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5731 else if (TREE_CODE (exp) == ERROR_MARK)
5732 return 1; /* An already-visited SAVE_EXPR? */
5733 else
5734 return 0;
5736 case '1':
5737 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5739 case '2':
5740 case '<':
5741 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5742 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5744 case 'e':
5745 case 'r':
5746 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5747 the expression. If it is set, we conflict iff we are that rtx or
5748 both are in memory. Otherwise, we check all operands of the
5749 expression recursively. */
5751 switch (TREE_CODE (exp))
5753 case ADDR_EXPR:
5754 /* If the operand is static or we are static, we can't conflict.
5755 Likewise if we don't conflict with the operand at all. */
5756 if (staticp (TREE_OPERAND (exp, 0))
5757 || TREE_STATIC (exp)
5758 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5759 return 1;
5761 /* Otherwise, the only way this can conflict is if we are taking
5762 the address of a DECL whose address is part of X, which is
5763 very rare. */
5764 exp = TREE_OPERAND (exp, 0);
5765 if (DECL_P (exp))
5767 if (!DECL_RTL_SET_P (exp)
5768 || GET_CODE (DECL_RTL (exp)) != MEM)
5769 return 0;
5770 else
5771 exp_rtl = XEXP (DECL_RTL (exp), 0);
5773 break;
5775 case INDIRECT_REF:
5776 if (GET_CODE (x) == MEM
5777 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5778 get_alias_set (exp)))
5779 return 0;
5780 break;
5782 case CALL_EXPR:
5783 /* Assume that the call will clobber all hard registers and
5784 all of memory. */
5785 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5786 || GET_CODE (x) == MEM)
5787 return 0;
5788 break;
5790 case RTL_EXPR:
5791 /* If a sequence exists, we would have to scan every instruction
5792 in the sequence to see if it was safe. This is probably not
5793 worthwhile. */
5794 if (RTL_EXPR_SEQUENCE (exp))
5795 return 0;
5797 exp_rtl = RTL_EXPR_RTL (exp);
5798 break;
5800 case WITH_CLEANUP_EXPR:
5801 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5802 break;
5804 case CLEANUP_POINT_EXPR:
5805 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5807 case SAVE_EXPR:
5808 exp_rtl = SAVE_EXPR_RTL (exp);
5809 if (exp_rtl)
5810 break;
5812 /* If we've already scanned this, don't do it again. Otherwise,
5813 show we've scanned it and record for clearing the flag if we're
5814 going on. */
5815 if (TREE_PRIVATE (exp))
5816 return 1;
5818 TREE_PRIVATE (exp) = 1;
5819 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5821 TREE_PRIVATE (exp) = 0;
5822 return 0;
5825 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5826 return 1;
5828 case BIND_EXPR:
5829 /* The only operand we look at is operand 1. The rest aren't
5830 part of the expression. */
5831 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5833 case METHOD_CALL_EXPR:
5834 /* This takes an rtx argument, but shouldn't appear here. */
5835 abort ();
5837 default:
5838 break;
5841 /* If we have an rtx, we do not need to scan our operands. */
5842 if (exp_rtl)
5843 break;
5845 nops = first_rtl_op (TREE_CODE (exp));
5846 for (i = 0; i < nops; i++)
5847 if (TREE_OPERAND (exp, i) != 0
5848 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5849 return 0;
5851 /* If this is a language-specific tree code, it may require
5852 special handling. */
5853 if ((unsigned int) TREE_CODE (exp)
5854 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5855 && !(*lang_hooks.safe_from_p) (x, exp))
5856 return 0;
5859 /* If we have an rtl, find any enclosed object. Then see if we conflict
5860 with it. */
5861 if (exp_rtl)
5863 if (GET_CODE (exp_rtl) == SUBREG)
5865 exp_rtl = SUBREG_REG (exp_rtl);
5866 if (GET_CODE (exp_rtl) == REG
5867 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5868 return 0;
5871 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5872 are memory and they conflict. */
5873 return ! (rtx_equal_p (x, exp_rtl)
5874 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5875 && true_dependence (exp_rtl, VOIDmode, x,
5876 rtx_addr_varies_p)));
5879 /* If we reach here, it is safe. */
5880 return 1;
5883 /* Subroutine of expand_expr: return rtx if EXP is a
5884 variable or parameter; else return 0. */
5886 static rtx
5887 var_rtx (exp)
5888 tree exp;
5890 STRIP_NOPS (exp);
5891 switch (TREE_CODE (exp))
5893 case PARM_DECL:
5894 case VAR_DECL:
5895 return DECL_RTL (exp);
5896 default:
5897 return 0;
5901 #ifdef MAX_INTEGER_COMPUTATION_MODE
5903 void
5904 check_max_integer_computation_mode (exp)
5905 tree exp;
5907 enum tree_code code;
5908 enum machine_mode mode;
5910 /* Strip any NOPs that don't change the mode. */
5911 STRIP_NOPS (exp);
5912 code = TREE_CODE (exp);
5914 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5915 if (code == NOP_EXPR
5916 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5917 return;
5919 /* First check the type of the overall operation. We need only look at
5920 unary, binary and relational operations. */
5921 if (TREE_CODE_CLASS (code) == '1'
5922 || TREE_CODE_CLASS (code) == '2'
5923 || TREE_CODE_CLASS (code) == '<')
5925 mode = TYPE_MODE (TREE_TYPE (exp));
5926 if (GET_MODE_CLASS (mode) == MODE_INT
5927 && mode > MAX_INTEGER_COMPUTATION_MODE)
5928 internal_error ("unsupported wide integer operation");
5931 /* Check operand of a unary op. */
5932 if (TREE_CODE_CLASS (code) == '1')
5934 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5935 if (GET_MODE_CLASS (mode) == MODE_INT
5936 && mode > MAX_INTEGER_COMPUTATION_MODE)
5937 internal_error ("unsupported wide integer operation");
5940 /* Check operands of a binary/comparison op. */
5941 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5943 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5944 if (GET_MODE_CLASS (mode) == MODE_INT
5945 && mode > MAX_INTEGER_COMPUTATION_MODE)
5946 internal_error ("unsupported wide integer operation");
5948 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5949 if (GET_MODE_CLASS (mode) == MODE_INT
5950 && mode > MAX_INTEGER_COMPUTATION_MODE)
5951 internal_error ("unsupported wide integer operation");
5954 #endif
5956 /* Return the highest power of two that EXP is known to be a multiple of.
5957 This is used in updating alignment of MEMs in array references. */
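/* Worked example (constants chosen for illustration): for i * 12 + 20
   the MULT_EXPR contributes 1 * 4 = 4, the constant 20 contributes 4,
   and the PLUS_EXPR takes the minimum of the two, so the result is 4
   and a MEM addressed by this expression may be marked 4-byte
   aligned.  */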
5959 static HOST_WIDE_INT
5960 highest_pow2_factor (exp)
5961 tree exp;
5963 HOST_WIDE_INT c0, c1;
5965 switch (TREE_CODE (exp))
5967 case INTEGER_CST:
5968 /* We can find the lowest bit that's a one. If the low
5969 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5970 We need to handle this case since we can find it in a COND_EXPR,
5971 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5972 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5973 later ICE. */
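/* E.g. for the constant 24 (binary 11000) the AND with its negation
   below leaves 8, the lowest set bit, which is the largest power of
   two dividing 24.  */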
5974 if (TREE_CONSTANT_OVERFLOW (exp))
5975 return BIGGEST_ALIGNMENT;
5976 else
5978 /* Note: tree_low_cst is intentionally not used here,
5979 we don't care about the upper bits. */
5980 c0 = TREE_INT_CST_LOW (exp);
5981 c0 &= -c0;
5982 return c0 ? c0 : BIGGEST_ALIGNMENT;
5984 break;
5986 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5987 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5988 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5989 return MIN (c0, c1);
5991 case MULT_EXPR:
5992 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5993 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5994 return c0 * c1;
5996 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5997 case CEIL_DIV_EXPR:
5998 if (integer_pow2p (TREE_OPERAND (exp, 1))
5999 && host_integerp (TREE_OPERAND (exp, 1), 1))
6001 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6002 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6003 return MAX (1, c0 / c1);
6005 break;
6007 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6008 case SAVE_EXPR: case WITH_RECORD_EXPR:
6009 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6011 case COMPOUND_EXPR:
6012 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6014 case COND_EXPR:
6015 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6016 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6017 return MIN (c0, c1);
6019 default:
6020 break;
6023 return 1;
6026 /* Similar, except that it is known that the expression must be a multiple
6027 of the alignment of TYPE. */
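/* E.g. if EXP by itself only guarantees a factor of 2 but TYPE is
   aligned to 8 bytes, the result is 8, since the expression is known
   to be a multiple of TYPE's alignment.  */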
6029 static HOST_WIDE_INT
6030 highest_pow2_factor_for_type (type, exp)
6031 tree type;
6032 tree exp;
6034 HOST_WIDE_INT type_align, factor;
6036 factor = highest_pow2_factor (exp);
6037 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6038 return MAX (factor, type_align);
6041 /* Return an object on the placeholder list that matches EXP, a
6042 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6043 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6044 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6045 points to a location holding the place in the placeholder list at which
6046 to start searching (zero meaning the head of the list); when the object
6047 is found, a pointer to its placeholder list entry is stored back there. */
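/* A sketch of the usual calling pattern: the PLACEHOLDER_EXPR case of
   expand_expr below does
     exp = find_placeholder (exp, &placeholder_expr);
   and then expands the object that a WITH_RECORD_EXPR pushed onto
   placeholder_list in place of the placeholder.  */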
6049 tree
6050 find_placeholder (exp, plist)
6051 tree exp;
6052 tree *plist;
6054 tree type = TREE_TYPE (exp);
6055 tree placeholder_expr;
6057 for (placeholder_expr
6058 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6059 placeholder_expr != 0;
6060 placeholder_expr = TREE_CHAIN (placeholder_expr))
6062 tree need_type = TYPE_MAIN_VARIANT (type);
6063 tree elt;
6065 /* Find the outermost reference that is of the type we want. If none,
6066 see if any object has a type that is a pointer to the type we
6067 want. */
6068 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6069 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6070 || TREE_CODE (elt) == COND_EXPR)
6071 ? TREE_OPERAND (elt, 1)
6072 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6073 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6074 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6075 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6076 ? TREE_OPERAND (elt, 0) : 0))
6077 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6079 if (plist)
6080 *plist = placeholder_expr;
6081 return elt;
6084 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6086 = ((TREE_CODE (elt) == COMPOUND_EXPR
6087 || TREE_CODE (elt) == COND_EXPR)
6088 ? TREE_OPERAND (elt, 1)
6089 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6090 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6091 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6092 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6093 ? TREE_OPERAND (elt, 0) : 0))
6094 if (POINTER_TYPE_P (TREE_TYPE (elt))
6095 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6096 == need_type))
6098 if (plist)
6099 *plist = placeholder_expr;
6100 return build1 (INDIRECT_REF, need_type, elt);
6104 return 0;
6107 /* expand_expr: generate code for computing expression EXP.
6108 An rtx for the computed value is returned. The value is never null.
6109 In the case of a void EXP, const0_rtx is returned.
6111 The value may be stored in TARGET if TARGET is nonzero.
6112 TARGET is just a suggestion; callers must assume that
6113 the rtx returned may not be the same as TARGET.
6115 If TARGET is CONST0_RTX, it means that the value will be ignored.
6117 If TMODE is not VOIDmode, it suggests generating the
6118 result in mode TMODE. But this is done only when convenient.
6119 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6120 TMODE is just a suggestion; callers must assume that
6121 the rtx returned may not have mode TMODE.
6123 Note that TARGET may have neither TMODE nor MODE. In that case, it
6124 probably will not be used.
6126 If MODIFIER is EXPAND_SUM then when EXP is an addition
6127 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6128 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6129 products as above, or REG or MEM, or constant.
6130 Ordinarily in such cases we would output mul or add instructions
6131 and then return a pseudo reg containing the sum.
6133 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6134 it also marks a label as absolutely required (it can't be dead).
6135 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6136 This is used for outputting expressions used in initializers.
6138 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6139 with a constant address even if that address is not normally legitimate.
6140 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
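/* A minimal call (sketch only) looks like
     rtx x = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
   which leaves the choice of target and mode entirely to this
   function; passing const0_rtx as TARGET instead evaluates EXP only
   for its side effects.  */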
6143 expand_expr (exp, target, tmode, modifier)
6144 tree exp;
6145 rtx target;
6146 enum machine_mode tmode;
6147 enum expand_modifier modifier;
6149 rtx op0, op1, temp;
6150 tree type = TREE_TYPE (exp);
6151 int unsignedp = TREE_UNSIGNED (type);
6152 enum machine_mode mode;
6153 enum tree_code code = TREE_CODE (exp);
6154 optab this_optab;
6155 rtx subtarget, original_target;
6156 int ignore;
6157 tree context;
6159 /* Handle ERROR_MARK before anybody tries to access its type. */
6160 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6162 op0 = CONST0_RTX (tmode);
6163 if (op0 != 0)
6164 return op0;
6165 return const0_rtx;
6168 mode = TYPE_MODE (type);
6169 /* Use subtarget as the target for operand 0 of a binary operation. */
6170 subtarget = get_subtarget (target);
6171 original_target = target;
6172 ignore = (target == const0_rtx
6173 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6174 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6175 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6176 && TREE_CODE (type) == VOID_TYPE));
6178 /* If we are going to ignore this result, we need only do something
6179 if there is a side-effect somewhere in the expression. If there
6180 is, short-circuit the most common cases here. Note that we must
6181 not call expand_expr with anything but const0_rtx in case this
6182 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6184 if (ignore)
6186 if (! TREE_SIDE_EFFECTS (exp))
6187 return const0_rtx;
6189 /* Ensure we reference a volatile object even if value is ignored, but
6190 don't do this if all we are doing is taking its address. */
6191 if (TREE_THIS_VOLATILE (exp)
6192 && TREE_CODE (exp) != FUNCTION_DECL
6193 && mode != VOIDmode && mode != BLKmode
6194 && modifier != EXPAND_CONST_ADDRESS)
6196 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6197 if (GET_CODE (temp) == MEM)
6198 temp = copy_to_reg (temp);
6199 return const0_rtx;
6202 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6203 || code == INDIRECT_REF || code == BUFFER_REF)
6204 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6205 modifier);
6207 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6208 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6210 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6211 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6212 return const0_rtx;
6214 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6215 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6216 /* If the second operand has no side effects, just evaluate
6217 the first. */
6218 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6219 modifier);
6220 else if (code == BIT_FIELD_REF)
6222 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6223 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6224 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6225 return const0_rtx;
6228 target = 0;
6231 #ifdef MAX_INTEGER_COMPUTATION_MODE
6232 /* Only check stuff here if the mode we want is different from the mode
6233 of the expression; if it's the same, check_max_integer_computation_mode
6234 will handle it. Do we really need to check this stuff at all? */
6236 if (target
6237 && GET_MODE (target) != mode
6238 && TREE_CODE (exp) != INTEGER_CST
6239 && TREE_CODE (exp) != PARM_DECL
6240 && TREE_CODE (exp) != ARRAY_REF
6241 && TREE_CODE (exp) != ARRAY_RANGE_REF
6242 && TREE_CODE (exp) != COMPONENT_REF
6243 && TREE_CODE (exp) != BIT_FIELD_REF
6244 && TREE_CODE (exp) != INDIRECT_REF
6245 && TREE_CODE (exp) != CALL_EXPR
6246 && TREE_CODE (exp) != VAR_DECL
6247 && TREE_CODE (exp) != RTL_EXPR)
6249 enum machine_mode mode = GET_MODE (target);
6251 if (GET_MODE_CLASS (mode) == MODE_INT
6252 && mode > MAX_INTEGER_COMPUTATION_MODE)
6253 internal_error ("unsupported wide integer operation");
6256 if (tmode != mode
6257 && TREE_CODE (exp) != INTEGER_CST
6258 && TREE_CODE (exp) != PARM_DECL
6259 && TREE_CODE (exp) != ARRAY_REF
6260 && TREE_CODE (exp) != ARRAY_RANGE_REF
6261 && TREE_CODE (exp) != COMPONENT_REF
6262 && TREE_CODE (exp) != BIT_FIELD_REF
6263 && TREE_CODE (exp) != INDIRECT_REF
6264 && TREE_CODE (exp) != VAR_DECL
6265 && TREE_CODE (exp) != CALL_EXPR
6266 && TREE_CODE (exp) != RTL_EXPR
6267 && GET_MODE_CLASS (tmode) == MODE_INT
6268 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6269 internal_error ("unsupported wide integer operation");
6271 check_max_integer_computation_mode (exp);
6272 #endif
6274 /* If will do cse, generate all results into pseudo registers
6275 since 1) that allows cse to find more things
6276 and 2) otherwise cse could produce an insn the machine
6277 cannot support. An exception is a CONSTRUCTOR into a multi-word
6278 MEM: that's much more likely to be most efficient into the MEM. */
6280 if (! cse_not_expected && mode != BLKmode && target
6281 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6282 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6283 target = subtarget;
6285 switch (code)
6287 case LABEL_DECL:
6289 tree function = decl_function_context (exp);
6290 /* Handle using a label in a containing function. */
6291 if (function != current_function_decl
6292 && function != inline_function_decl && function != 0)
6294 struct function *p = find_function_data (function);
6295 p->expr->x_forced_labels
6296 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6297 p->expr->x_forced_labels);
6299 else
6301 if (modifier == EXPAND_INITIALIZER)
6302 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6303 label_rtx (exp),
6304 forced_labels);
6307 temp = gen_rtx_MEM (FUNCTION_MODE,
6308 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6309 if (function != current_function_decl
6310 && function != inline_function_decl && function != 0)
6311 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6312 return temp;
6315 case PARM_DECL:
6316 if (DECL_RTL (exp) == 0)
6318 error_with_decl (exp, "prior parameter's size depends on `%s'");
6319 return CONST0_RTX (mode);
6322 /* ... fall through ... */
6324 case VAR_DECL:
6325 /* If a static var's type was incomplete when the decl was written,
6326 but the type is complete now, lay out the decl now. */
6327 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6328 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6330 rtx value = DECL_RTL_IF_SET (exp);
6332 layout_decl (exp, 0);
6334 /* If the RTL was already set, update its mode and memory
6335 attributes. */
6336 if (value != 0)
6338 PUT_MODE (value, DECL_MODE (exp));
6339 SET_DECL_RTL (exp, 0);
6340 set_mem_attributes (value, exp, 1);
6341 SET_DECL_RTL (exp, value);
6345 /* ... fall through ... */
6347 case FUNCTION_DECL:
6348 case RESULT_DECL:
6349 if (DECL_RTL (exp) == 0)
6350 abort ();
6352 /* Ensure variable marked as used even if it doesn't go through
6353 a parser. If it hasn't been used yet, write out an external
6354 definition. */
6355 if (! TREE_USED (exp))
6357 assemble_external (exp);
6358 TREE_USED (exp) = 1;
6361 /* Show we haven't gotten RTL for this yet. */
6362 temp = 0;
6364 /* Handle variables inherited from containing functions. */
6365 context = decl_function_context (exp);
6367 /* We treat inline_function_decl as an alias for the current function
6368 because that is the inline function whose vars, types, etc.
6369 are being merged into the current function.
6370 See expand_inline_function. */
6372 if (context != 0 && context != current_function_decl
6373 && context != inline_function_decl
6374 /* If var is static, we don't need a static chain to access it. */
6375 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6376 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6378 rtx addr;
6380 /* Mark as non-local and addressable. */
6381 DECL_NONLOCAL (exp) = 1;
6382 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6383 abort ();
6384 (*lang_hooks.mark_addressable) (exp);
6385 if (GET_CODE (DECL_RTL (exp)) != MEM)
6386 abort ();
6387 addr = XEXP (DECL_RTL (exp), 0);
6388 if (GET_CODE (addr) == MEM)
6389 addr
6390 = replace_equiv_address (addr,
6391 fix_lexical_addr (XEXP (addr, 0), exp));
6392 else
6393 addr = fix_lexical_addr (addr, exp);
6395 temp = replace_equiv_address (DECL_RTL (exp), addr);
6398 /* This is the case of an array whose size is to be determined
6399 from its initializer, while the initializer is still being parsed.
6400 See expand_decl. */
6402 else if (GET_CODE (DECL_RTL (exp)) == MEM
6403 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6404 temp = validize_mem (DECL_RTL (exp));
6406 /* If DECL_RTL is memory, we are in the normal case and either
6407 the address is not valid or it is not a register and -fforce-addr
6408 is specified, get the address into a register. */
6410 else if (GET_CODE (DECL_RTL (exp)) == MEM
6411 && modifier != EXPAND_CONST_ADDRESS
6412 && modifier != EXPAND_SUM
6413 && modifier != EXPAND_INITIALIZER
6414 && (! memory_address_p (DECL_MODE (exp),
6415 XEXP (DECL_RTL (exp), 0))
6416 || (flag_force_addr
6417 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6418 temp = replace_equiv_address (DECL_RTL (exp),
6419 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6421 /* If we got something, return it. But first, set the alignment
6422 if the address is a register. */
6423 if (temp != 0)
6425 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6426 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6428 return temp;
6431 /* If the mode of DECL_RTL does not match that of the decl, it
6432 must be a promoted value. We return a SUBREG of the wanted mode,
6433 but mark it so that we know that it was already extended. */
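/* For instance, on a target that promotes QImode locals to SImode
   registers, DECL_RTL may be (reg:SI 110) while DECL_MODE is QImode
   (register number made up); the code below then returns a QImode
   lowpart SUBREG of that register with SUBREG_PROMOTED_VAR_P set, so
   later users know the extension has already been done.  */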
6435 if (GET_CODE (DECL_RTL (exp)) == REG
6436 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6438 /* Get the signedness used for this variable. Ensure we get the
6439 same mode we got when the variable was declared. */
6440 if (GET_MODE (DECL_RTL (exp))
6441 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6442 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6443 abort ();
6445 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6446 SUBREG_PROMOTED_VAR_P (temp) = 1;
6447 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6448 return temp;
6451 return DECL_RTL (exp);
6453 case INTEGER_CST:
6454 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6455 TREE_INT_CST_HIGH (exp), mode);
6457 /* ??? If overflow is set, fold will have done an incomplete job,
6458 which can result in (plus xx (const_int 0)), which can get
6459 simplified by validate_replace_rtx during virtual register
6460 instantiation, which can result in unrecognizable insns.
6461 Avoid this by forcing all overflows into registers. */
6462 if (TREE_CONSTANT_OVERFLOW (exp)
6463 && modifier != EXPAND_INITIALIZER)
6464 temp = force_reg (mode, temp);
6466 return temp;
6468 case CONST_DECL:
6469 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6471 case REAL_CST:
6472 /* If optimized, generate immediate CONST_DOUBLE
6473 which will be turned into memory by reload if necessary.
6475 We used to force a register so that loop.c could see it. But
6476 this does not allow gen_* patterns to perform optimizations with
6477 the constants. It also produces two insns in cases like "x = 1.0;".
6478 On most machines, floating-point constants are not permitted in
6479 many insns, so we'd end up copying it to a register in any case.
6481 Now, we do the copying in expand_binop, if appropriate. */
6482 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6483 TYPE_MODE (TREE_TYPE (exp)));
6485 case COMPLEX_CST:
6486 case STRING_CST:
6487 if (! TREE_CST_RTL (exp))
6488 output_constant_def (exp, 1);
6490 /* TREE_CST_RTL probably contains a constant address.
6491 On RISC machines where a constant address isn't valid,
6492 make some insns to get that address into a register. */
6493 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6494 && modifier != EXPAND_CONST_ADDRESS
6495 && modifier != EXPAND_INITIALIZER
6496 && modifier != EXPAND_SUM
6497 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6498 || (flag_force_addr
6499 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6500 return replace_equiv_address (TREE_CST_RTL (exp),
6501 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6502 return TREE_CST_RTL (exp);
6504 case EXPR_WITH_FILE_LOCATION:
6506 rtx to_return;
6507 const char *saved_input_filename = input_filename;
6508 int saved_lineno = lineno;
6509 input_filename = EXPR_WFL_FILENAME (exp);
6510 lineno = EXPR_WFL_LINENO (exp);
6511 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6512 emit_line_note (input_filename, lineno);
6513 /* Possibly avoid switching back and forth here. */
6514 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6515 input_filename = saved_input_filename;
6516 lineno = saved_lineno;
6517 return to_return;
6520 case SAVE_EXPR:
6521 context = decl_function_context (exp);
6523 /* If this SAVE_EXPR was at global context, assume we are an
6524 initialization function and move it into our context. */
6525 if (context == 0)
6526 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6528 /* We treat inline_function_decl as an alias for the current function
6529 because that is the inline function whose vars, types, etc.
6530 are being merged into the current function.
6531 See expand_inline_function. */
6532 if (context == current_function_decl || context == inline_function_decl)
6533 context = 0;
6535 /* If this is non-local, handle it. */
6536 if (context)
6538 /* The following call just exists to abort if the context is
6539 not of a containing function. */
6540 find_function_data (context);
6542 temp = SAVE_EXPR_RTL (exp);
6543 if (temp && GET_CODE (temp) == REG)
6545 put_var_into_stack (exp);
6546 temp = SAVE_EXPR_RTL (exp);
6548 if (temp == 0 || GET_CODE (temp) != MEM)
6549 abort ();
6550 return
6551 replace_equiv_address (temp,
6552 fix_lexical_addr (XEXP (temp, 0), exp));
6554 if (SAVE_EXPR_RTL (exp) == 0)
6556 if (mode == VOIDmode)
6557 temp = const0_rtx;
6558 else
6559 temp = assign_temp (build_qualified_type (type,
6560 (TYPE_QUALS (type)
6561 | TYPE_QUAL_CONST)),
6562 3, 0, 0);
6564 SAVE_EXPR_RTL (exp) = temp;
6565 if (!optimize && GET_CODE (temp) == REG)
6566 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6567 save_expr_regs);
6569 /* If the mode of TEMP does not match that of the expression, it
6570 must be a promoted value. We pass store_expr a SUBREG of the
6571 wanted mode but mark it so that we know that it was already
6572 extended. Note that `unsignedp' was modified above in
6573 this case. */
6575 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6577 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6578 SUBREG_PROMOTED_VAR_P (temp) = 1;
6579 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6582 if (temp == const0_rtx)
6583 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6584 else
6585 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6587 TREE_USED (exp) = 1;
6590 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6591 must be a promoted value. We return a SUBREG of the wanted mode,
6592 but mark it so that we know that it was already extended. */
6594 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6595 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6597 /* Compute the signedness and make the proper SUBREG. */
6598 promote_mode (type, mode, &unsignedp, 0);
6599 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6600 SUBREG_PROMOTED_VAR_P (temp) = 1;
6601 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6602 return temp;
6605 return SAVE_EXPR_RTL (exp);
6607 case UNSAVE_EXPR:
6609 rtx temp;
6610 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6611 TREE_OPERAND (exp, 0)
6612 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6613 return temp;
6616 case PLACEHOLDER_EXPR:
6618 tree old_list = placeholder_list;
6619 tree placeholder_expr = 0;
6621 exp = find_placeholder (exp, &placeholder_expr);
6622 if (exp == 0)
6623 abort ();
6625 placeholder_list = TREE_CHAIN (placeholder_expr);
6626 temp = expand_expr (exp, original_target, tmode, modifier);
6627 placeholder_list = old_list;
6628 return temp;
6631 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6632 abort ();
6634 case WITH_RECORD_EXPR:
6635 /* Put the object on the placeholder list, expand our first operand,
6636 and pop the list. */
6637 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6638 placeholder_list);
6639 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6640 modifier);
6641 placeholder_list = TREE_CHAIN (placeholder_list);
6642 return target;
6644 case GOTO_EXPR:
6645 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6646 expand_goto (TREE_OPERAND (exp, 0));
6647 else
6648 expand_computed_goto (TREE_OPERAND (exp, 0));
6649 return const0_rtx;
6651 case EXIT_EXPR:
6652 expand_exit_loop_if_false (NULL,
6653 invert_truthvalue (TREE_OPERAND (exp, 0)));
6654 return const0_rtx;
6656 case LABELED_BLOCK_EXPR:
6657 if (LABELED_BLOCK_BODY (exp))
6658 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6659 /* Should perhaps use expand_label, but this is simpler and safer. */
6660 do_pending_stack_adjust ();
6661 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6662 return const0_rtx;
6664 case EXIT_BLOCK_EXPR:
6665 if (EXIT_BLOCK_RETURN (exp))
6666 sorry ("returned value in block_exit_expr");
6667 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6668 return const0_rtx;
6670 case LOOP_EXPR:
6671 push_temp_slots ();
6672 expand_start_loop (1);
6673 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6674 expand_end_loop ();
6675 pop_temp_slots ();
6677 return const0_rtx;
6679 case BIND_EXPR:
6681 tree vars = TREE_OPERAND (exp, 0);
6682 int vars_need_expansion = 0;
6684 /* Need to open a binding contour here because
6685 if there are any cleanups they must be contained here. */
6686 expand_start_bindings (2);
6688 /* Mark the corresponding BLOCK for output in its proper place. */
6689 if (TREE_OPERAND (exp, 2) != 0
6690 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6691 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6693 /* If VARS have not yet been expanded, expand them now. */
6694 while (vars)
6696 if (!DECL_RTL_SET_P (vars))
6698 vars_need_expansion = 1;
6699 expand_decl (vars);
6701 expand_decl_init (vars);
6702 vars = TREE_CHAIN (vars);
6705 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6707 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6709 return temp;
6712 case RTL_EXPR:
6713 if (RTL_EXPR_SEQUENCE (exp))
6715 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6716 abort ();
6717 emit_insns (RTL_EXPR_SEQUENCE (exp));
6718 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6720 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6721 free_temps_for_rtl_expr (exp);
6722 return RTL_EXPR_RTL (exp);
6724 case CONSTRUCTOR:
6725 /* If we don't need the result, just ensure we evaluate any
6726 subexpressions. */
6727 if (ignore)
6729 tree elt;
6731 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6732 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6734 return const0_rtx;
6737 /* All elts simple constants => refer to a constant in memory. But
6738 if this is a non-BLKmode mode, let it store a field at a time
6739 since that should make a CONST_INT or CONST_DOUBLE when we
6740 fold. Likewise, if we have a target we can use, it is best to
6741 store directly into the target unless the type is large enough
6742 that memcpy will be used. If we are making an initializer and
6743 all operands are constant, put it in memory as well.
6745 FIXME: Avoid trying to fill vector constructors piece-meal.
6746 Output them with output_constant_def below unless we're sure
6747 they're zeros. This should go away when vector initializers
6748 are treated like VECTOR_CST instead of arrays.
6750 else if ((TREE_STATIC (exp)
6751 && ((mode == BLKmode
6752 && ! (target != 0 && safe_from_p (target, exp, 1)))
6753 || TREE_ADDRESSABLE (exp)
6754 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6755 && (! MOVE_BY_PIECES_P
6756 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6757 TYPE_ALIGN (type)))
6758 && ((TREE_CODE (type) == VECTOR_TYPE
6759 && !is_zeros_p (exp))
6760 || ! mostly_zeros_p (exp)))))
6761 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6763 rtx constructor = output_constant_def (exp, 1);
6765 if (modifier != EXPAND_CONST_ADDRESS
6766 && modifier != EXPAND_INITIALIZER
6767 && modifier != EXPAND_SUM)
6768 constructor = validize_mem (constructor);
6770 return constructor;
6772 else
6774 /* Handle calls that pass values in multiple non-contiguous
6775 locations. The Irix 6 ABI has examples of this. */
6776 if (target == 0 || ! safe_from_p (target, exp, 1)
6777 || GET_CODE (target) == PARALLEL)
6778 target
6779 = assign_temp (build_qualified_type (type,
6780 (TYPE_QUALS (type)
6781 | (TREE_READONLY (exp)
6782 * TYPE_QUAL_CONST))),
6783 0, TREE_ADDRESSABLE (exp), 1);
6785 store_constructor (exp, target, 0,
6786 int_size_in_bytes (TREE_TYPE (exp)));
6787 return target;
6790 case INDIRECT_REF:
6792 tree exp1 = TREE_OPERAND (exp, 0);
6793 tree index;
6794 tree string = string_constant (exp1, &index);
6796 /* Try to optimize reads from const strings. */
6797 if (string
6798 && TREE_CODE (string) == STRING_CST
6799 && TREE_CODE (index) == INTEGER_CST
6800 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6801 && GET_MODE_CLASS (mode) == MODE_INT
6802 && GET_MODE_SIZE (mode) == 1
6803 && modifier != EXPAND_WRITE)
6804 return gen_int_mode (TREE_STRING_POINTER (string)
6805 [TREE_INT_CST_LOW (index)], mode);
6807 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6808 op0 = memory_address (mode, op0);
6809 temp = gen_rtx_MEM (mode, op0);
6810 set_mem_attributes (temp, exp, 0);
6812 /* If we are writing to this object and its type is a record with
6813 readonly fields, we must mark it as readonly so it will
6814 conflict with readonly references to those fields. */
6815 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6816 RTX_UNCHANGING_P (temp) = 1;
6818 return temp;
6821 case ARRAY_REF:
6822 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6823 abort ();
6826 tree array = TREE_OPERAND (exp, 0);
6827 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6828 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6829 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6830 HOST_WIDE_INT i;
6832 /* Optimize the special-case of a zero lower bound.
6834 We convert the low_bound to sizetype to avoid some problems
6835 with constant folding. (E.g. suppose the lower bound is 1,
6836 and its mode is QI. Without the conversion, (ARRAY
6837 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6838 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6840 if (! integer_zerop (low_bound))
6841 index = size_diffop (index, convert (sizetype, low_bound));
6843 /* Fold an expression like: "foo"[2].
6844 This is not done in fold so it won't happen inside &.
6845 Don't fold if this is for wide characters since it's too
6846 difficult to do correctly and this is a very rare case. */
6848 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6849 && TREE_CODE (array) == STRING_CST
6850 && TREE_CODE (index) == INTEGER_CST
6851 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6852 && GET_MODE_CLASS (mode) == MODE_INT
6853 && GET_MODE_SIZE (mode) == 1)
6854 return gen_int_mode (TREE_STRING_POINTER (array)
6855 [TREE_INT_CST_LOW (index)], mode);
6857 /* If this is a constant index into a constant array,
6858 just get the value from the array. Handle both the cases when
6859 we have an explicit constructor and when our operand is a variable
6860 that was declared const. */
6862 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6863 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6864 && TREE_CODE (index) == INTEGER_CST
6865 && 0 > compare_tree_int (index,
6866 list_length (CONSTRUCTOR_ELTS
6867 (TREE_OPERAND (exp, 0)))))
6869 tree elem;
6871 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6872 i = TREE_INT_CST_LOW (index);
6873 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6876 if (elem)
6877 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6878 modifier);
6881 else if (optimize >= 1
6882 && modifier != EXPAND_CONST_ADDRESS
6883 && modifier != EXPAND_INITIALIZER
6884 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6885 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6886 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6888 if (TREE_CODE (index) == INTEGER_CST)
6890 tree init = DECL_INITIAL (array);
6892 if (TREE_CODE (init) == CONSTRUCTOR)
6894 tree elem;
6896 for (elem = CONSTRUCTOR_ELTS (init);
6897 (elem
6898 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6899 elem = TREE_CHAIN (elem))
6902 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6903 return expand_expr (fold (TREE_VALUE (elem)), target,
6904 tmode, modifier);
6906 else if (TREE_CODE (init) == STRING_CST
6907 && 0 > compare_tree_int (index,
6908 TREE_STRING_LENGTH (init)))
6910 tree type = TREE_TYPE (TREE_TYPE (init));
6911 enum machine_mode mode = TYPE_MODE (type);
6913 if (GET_MODE_CLASS (mode) == MODE_INT
6914 && GET_MODE_SIZE (mode) == 1)
6915 return gen_int_mode (TREE_STRING_POINTER (init)
6916 [TREE_INT_CST_LOW (index)], mode);
6921 /* Fall through. */
6923 case COMPONENT_REF:
6924 case BIT_FIELD_REF:
6925 case ARRAY_RANGE_REF:
6926 /* If the operand is a CONSTRUCTOR, we can just extract the
6927 appropriate field if it is present. Don't do this if we have
6928 already written the data since we want to refer to that copy
6929 and varasm.c assumes that's what we'll do. */
6930 if (code == COMPONENT_REF
6931 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6932 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6934 tree elt;
6936 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6937 elt = TREE_CHAIN (elt))
6938 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6939 /* We can normally use the value of the field in the
6940 CONSTRUCTOR. However, if this is a bitfield in
6941 an integral mode that we can fit in a HOST_WIDE_INT,
6942 we must mask only the number of bits in the bitfield,
6943 since this is done implicitly by the constructor. If
6944 the bitfield does not meet either of those conditions,
6945 we can't do this optimization. */
6946 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6947 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6948 == MODE_INT)
6949 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6950 <= HOST_BITS_PER_WIDE_INT))))
6952 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6953 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6955 HOST_WIDE_INT bitsize
6956 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6957 enum machine_mode imode
6958 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6960 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6962 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6963 op0 = expand_and (imode, op0, op1, target);
6965 else
6967 tree count
6968 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6971 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6972 target, 0);
6973 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6974 target, 0);
6978 return op0;
6983 enum machine_mode mode1;
6984 HOST_WIDE_INT bitsize, bitpos;
6985 tree offset;
6986 int volatilep = 0;
6987 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6988 &mode1, &unsignedp, &volatilep);
6989 rtx orig_op0;
6991 /* If we got back the original object, something is wrong. Perhaps
6992 we are evaluating an expression too early. In any event, don't
6993 infinitely recurse. */
6994 if (tem == exp)
6995 abort ();
6997 /* If TEM's type is a union of variable size, pass TARGET to the inner
6998 computation, since it will need a temporary and TARGET is known
6999 to be suitable for that. This occurs in unchecked conversion in Ada. */
7001 orig_op0 = op0
7002 = expand_expr (tem,
7003 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7004 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7005 != INTEGER_CST)
7006 ? target : NULL_RTX),
7007 VOIDmode,
7008 (modifier == EXPAND_INITIALIZER
7009 || modifier == EXPAND_CONST_ADDRESS)
7010 ? modifier : EXPAND_NORMAL);
7012 /* If this is a constant, put it into a register if it is a
7013 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7014 if (CONSTANT_P (op0))
7016 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7017 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7018 && offset == 0)
7019 op0 = force_reg (mode, op0);
7020 else
7021 op0 = validize_mem (force_const_mem (mode, op0));
7024 if (offset != 0)
7026 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7028 /* If this object is in a register, put it into memory.
7029 This case can't occur in C, but can in Ada if we have
7030 unchecked conversion of an expression from a scalar type to
7031 an array or record type. */
7032 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7033 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7035 /* If the operand is a SAVE_EXPR, we can deal with this by
7036 forcing the SAVE_EXPR into memory. */
7037 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7039 put_var_into_stack (TREE_OPERAND (exp, 0));
7040 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7042 else
7044 tree nt
7045 = build_qualified_type (TREE_TYPE (tem),
7046 (TYPE_QUALS (TREE_TYPE (tem))
7047 | TYPE_QUAL_CONST));
7048 rtx memloc = assign_temp (nt, 1, 1, 1);
7050 emit_move_insn (memloc, op0);
7051 op0 = memloc;
7055 if (GET_CODE (op0) != MEM)
7056 abort ();
7058 #ifdef POINTERS_EXTEND_UNSIGNED
7059 if (GET_MODE (offset_rtx) != Pmode)
7060 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7061 #else
7062 if (GET_MODE (offset_rtx) != ptr_mode)
7063 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7064 #endif
7066 /* A constant address in OP0 can have VOIDmode, we must not try
7067 to call force_reg for that case. Avoid that case. */
7068 if (GET_CODE (op0) == MEM
7069 && GET_MODE (op0) == BLKmode
7070 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7071 && bitsize != 0
7072 && (bitpos % bitsize) == 0
7073 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7074 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7076 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7077 bitpos = 0;
7080 op0 = offset_address (op0, offset_rtx,
7081 highest_pow2_factor (offset));
7084 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7085 record its alignment as BIGGEST_ALIGNMENT. */
7086 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7087 && is_aligning_offset (offset, tem))
7088 set_mem_align (op0, BIGGEST_ALIGNMENT);
7090 /* Don't forget about volatility even if this is a bitfield. */
7091 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7093 if (op0 == orig_op0)
7094 op0 = copy_rtx (op0);
7096 MEM_VOLATILE_P (op0) = 1;
7099 /* The following code doesn't handle CONCAT.
7100 Assume only bitpos == 0 can be used for CONCAT, due to
7101 one-element arrays having the same mode as their element. */
7102 if (GET_CODE (op0) == CONCAT)
7104 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7105 abort ();
7106 return op0;
7109 /* In cases where an aligned union has an unaligned object
7110 as a field, we might be extracting a BLKmode value from
7111 an integer-mode (e.g., SImode) object. Handle this case
7112 by doing the extract into an object as wide as the field
7113 (which we know to be the width of a basic mode), then
7114 storing into memory, and changing the mode to BLKmode. */
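/* Sketch of that path (modes for illustration only): a BLKmode field
   stored inside an SImode register is extracted with extract_bit_field
   into an SImode value, that value is copied to a stack temporary, and
   the temporary's MEM is returned with its mode changed to BLKmode.  */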
7115 if (mode1 == VOIDmode
7116 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7117 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7118 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7119 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7120 && modifier != EXPAND_CONST_ADDRESS
7121 && modifier != EXPAND_INITIALIZER)
7122 /* If the field isn't aligned enough to fetch as a memref,
7123 fetch it as a bit field. */
7124 || (mode1 != BLKmode
7125 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7126 && ((TYPE_ALIGN (TREE_TYPE (tem))
7127 < GET_MODE_ALIGNMENT (mode))
7128 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7129 /* If the type and the field are a constant size and the
7130 size of the type isn't the same size as the bitfield,
7131 we must use bitfield operations. */
7132 || (bitsize >= 0
7133 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7134 == INTEGER_CST)
7135 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7136 bitsize)))
7138 enum machine_mode ext_mode = mode;
7140 if (ext_mode == BLKmode
7141 && ! (target != 0 && GET_CODE (op0) == MEM
7142 && GET_CODE (target) == MEM
7143 && bitpos % BITS_PER_UNIT == 0))
7144 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7146 if (ext_mode == BLKmode)
7148 /* In this case, BITPOS must start at a byte boundary and
7149 TARGET, if specified, must be a MEM. */
7150 if (GET_CODE (op0) != MEM
7151 || (target != 0 && GET_CODE (target) != MEM)
7152 || bitpos % BITS_PER_UNIT != 0)
7153 abort ();
7155 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7156 if (target == 0)
7157 target = assign_temp (type, 0, 1, 1);
7159 emit_block_move (target, op0,
7160 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7161 / BITS_PER_UNIT));
7163 return target;
7166 op0 = validize_mem (op0);
7168 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7169 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7171 op0 = extract_bit_field (op0, bitsize, bitpos,
7172 unsignedp, target, ext_mode, ext_mode,
7173 int_size_in_bytes (TREE_TYPE (tem)));
7175 /* If the result is a record type and BITSIZE is narrower than
7176 the mode of OP0, an integral mode, and this is a big endian
7177 machine, we must put the field into the high-order bits. */
7178 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7179 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7180 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7181 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7182 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7183 - bitsize),
7184 op0, 1);
7186 if (mode == BLKmode)
7188 rtx new = assign_temp (build_qualified_type
7189 ((*lang_hooks.types.type_for_mode)
7190 (ext_mode, 0),
7191 TYPE_QUAL_CONST), 0, 1, 1);
7193 emit_move_insn (new, op0);
7194 op0 = copy_rtx (new);
7195 PUT_MODE (op0, BLKmode);
7196 set_mem_attributes (op0, exp, 1);
7199 return op0;
7202 /* If the result is BLKmode, use that to access the object
7203 now as well. */
7204 if (mode == BLKmode)
7205 mode1 = BLKmode;
7207 /* Get a reference to just this component. */
7208 if (modifier == EXPAND_CONST_ADDRESS
7209 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7210 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7211 else
7212 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7214 if (op0 == orig_op0)
7215 op0 = copy_rtx (op0);
7217 set_mem_attributes (op0, exp, 0);
7218 if (GET_CODE (XEXP (op0, 0)) == REG)
7219 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7221 MEM_VOLATILE_P (op0) |= volatilep;
7222 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7223 || modifier == EXPAND_CONST_ADDRESS
7224 || modifier == EXPAND_INITIALIZER)
7225 return op0;
7226 else if (target == 0)
7227 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7229 convert_move (target, op0, unsignedp);
7230 return target;
7233 case VTABLE_REF:
7235 rtx insn, before = get_last_insn (), vtbl_ref;
7237 /* Evaluate the interior expression. */
7238 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7239 tmode, modifier);
7241 /* Get or create an instruction off which to hang a note. */
7242 if (REG_P (subtarget))
7244 target = subtarget;
7245 insn = get_last_insn ();
7246 if (insn == before)
7247 abort ();
7248 if (! INSN_P (insn))
7249 insn = prev_nonnote_insn (insn);
7251 else
7253 target = gen_reg_rtx (GET_MODE (subtarget));
7254 insn = emit_move_insn (target, subtarget);
7257 /* Collect the data for the note. */
7258 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7259 vtbl_ref = plus_constant (vtbl_ref,
7260 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7261 /* Discard the initial CONST that was added. */
7262 vtbl_ref = XEXP (vtbl_ref, 0);
7264 REG_NOTES (insn)
7265 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7267 return target;
7270 /* Intended for a reference to a buffer of a file-object in Pascal.
7271 But it's not certain that a special tree code will really be
7272 necessary for these. INDIRECT_REF might work for them. */
7273 case BUFFER_REF:
7274 abort ();
7276 case IN_EXPR:
7278 /* Pascal set IN expression.
7280 Algorithm:
7281 rlo = set_low - (set_low%bits_per_word);
7282 the_word = set [ (index - rlo)/bits_per_word ];
7283 bit_index = index % bits_per_word;
7284 bitmask = 1 << bit_index;
7285 return !!(the_word & bitmask); */
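/* Worked instance, taking the word size in the algorithm above to be
   8 bits for illustration: with set_low = 0 and index = 13 we get
   rlo = 0, the_word = set[1], bit_index = 5 and bitmask = 0x20, so the
   result is nonzero exactly when bit 5 of the second byte of the set
   is set.  */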
7287 tree set = TREE_OPERAND (exp, 0);
7288 tree index = TREE_OPERAND (exp, 1);
7289 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7290 tree set_type = TREE_TYPE (set);
7291 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7292 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7293 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7294 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7295 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7296 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7297 rtx setaddr = XEXP (setval, 0);
7298 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7299 rtx rlow;
7300 rtx diff, quo, rem, addr, bit, result;
7302 /* If domain is empty, answer is no. Likewise if index is constant
7303 and out of bounds. */
7304 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7305 && TREE_CODE (set_low_bound) == INTEGER_CST
7306 && tree_int_cst_lt (set_high_bound, set_low_bound))
7307 || (TREE_CODE (index) == INTEGER_CST
7308 && TREE_CODE (set_low_bound) == INTEGER_CST
7309 && tree_int_cst_lt (index, set_low_bound))
7310 || (TREE_CODE (set_high_bound) == INTEGER_CST
7311 && TREE_CODE (index) == INTEGER_CST
7312 && tree_int_cst_lt (set_high_bound, index))))
7313 return const0_rtx;
7315 if (target == 0)
7316 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7318 /* If we get here, we have to generate the code for both cases
7319 (in range and out of range). */
7321 op0 = gen_label_rtx ();
7322 op1 = gen_label_rtx ();
7324 if (! (GET_CODE (index_val) == CONST_INT
7325 && GET_CODE (lo_r) == CONST_INT))
7326 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7327 GET_MODE (index_val), iunsignedp, op1);
7329 if (! (GET_CODE (index_val) == CONST_INT
7330 && GET_CODE (hi_r) == CONST_INT))
7331 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7332 GET_MODE (index_val), iunsignedp, op1);
7334 /* Calculate the element number of bit zero in the first word
7335 of the set. */
7336 if (GET_CODE (lo_r) == CONST_INT)
7337 rlow = GEN_INT (INTVAL (lo_r)
7338 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7339 else
7340 rlow = expand_binop (index_mode, and_optab, lo_r,
7341 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7342 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7344 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7345 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7347 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7348 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7349 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7350 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7352 addr = memory_address (byte_mode,
7353 expand_binop (index_mode, add_optab, diff,
7354 setaddr, NULL_RTX, iunsignedp,
7355 OPTAB_LIB_WIDEN));
7357 /* Extract the bit we want to examine. */
7358 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7359 gen_rtx_MEM (byte_mode, addr),
7360 make_tree (TREE_TYPE (index), rem),
7361 NULL_RTX, 1);
7362 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7363 GET_MODE (target) == byte_mode ? target : 0,
7364 1, OPTAB_LIB_WIDEN);
7366 if (result != target)
7367 convert_move (target, result, 1);
7369 /* Output the code to handle the out-of-range case. */
7370 emit_jump (op0);
7371 emit_label (op1);
7372 emit_move_insn (target, const0_rtx);
7373 emit_label (op0);
7374 return target;
7377 case WITH_CLEANUP_EXPR:
7378 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7380 WITH_CLEANUP_EXPR_RTL (exp)
7381 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7382 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7383 CLEANUP_EH_ONLY (exp));
7385 /* That's it for this cleanup. */
7386 TREE_OPERAND (exp, 1) = 0;
7388 return WITH_CLEANUP_EXPR_RTL (exp);
7390 case CLEANUP_POINT_EXPR:
7392 /* Start a new binding layer that will keep track of all cleanup
7393 actions to be performed. */
7394 expand_start_bindings (2);
7396 target_temp_slot_level = temp_slot_level;
7398 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7399 /* If we're going to use this value, load it up now. */
7400 if (! ignore)
7401 op0 = force_not_mem (op0);
7402 preserve_temp_slots (op0);
7403 expand_end_bindings (NULL_TREE, 0, 0);
7405 return op0;
7407 case CALL_EXPR:
7408 /* Check for a built-in function. */
7409 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7410 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7411 == FUNCTION_DECL)
7412 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7414 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7415 == BUILT_IN_FRONTEND)
7416 return (*lang_hooks.expand_expr)
7417 (exp, original_target, tmode, modifier);
7418 else
7419 return expand_builtin (exp, target, subtarget, tmode, ignore);
7422 return expand_call (exp, target, ignore);
7424 case NON_LVALUE_EXPR:
7425 case NOP_EXPR:
7426 case CONVERT_EXPR:
7427 case REFERENCE_EXPR:
7428 if (TREE_OPERAND (exp, 0) == error_mark_node)
7429 return const0_rtx;
7431 if (TREE_CODE (type) == UNION_TYPE)
7433 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7435 /* If both input and output are BLKmode, this conversion isn't doing
7436 anything except possibly changing memory attributes. */
7437 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7439 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7440 modifier);
7442 result = copy_rtx (result);
7443 set_mem_attributes (result, exp, 0);
7444 return result;
7447 if (target == 0)
7448 target = assign_temp (type, 0, 1, 1);
7450 if (GET_CODE (target) == MEM)
7451 /* Store data into beginning of memory target. */
7452 store_expr (TREE_OPERAND (exp, 0),
7453 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7455 else if (GET_CODE (target) == REG)
7456 /* Store this field into a union of the proper type. */
7457 store_field (target,
7458 MIN ((int_size_in_bytes (TREE_TYPE
7459 (TREE_OPERAND (exp, 0)))
7460 * BITS_PER_UNIT),
7461 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7462 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7463 VOIDmode, 0, type, 0);
7464 else
7465 abort ();
7467 /* Return the entire union. */
7468 return target;
7471 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7473 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7474 modifier);
7476 /* If the signedness of the conversion differs and OP0 is
7477 a promoted SUBREG, clear that indication since we now
7478 have to do the proper extension. */
7479 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7480 && GET_CODE (op0) == SUBREG)
7481 SUBREG_PROMOTED_VAR_P (op0) = 0;
7483 return op0;
7486 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7487 if (GET_MODE (op0) == mode)
7488 return op0;
7490 /* If OP0 is a constant, just convert it into the proper mode. */
7491 if (CONSTANT_P (op0))
7493 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7494 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7496 if (modifier == EXPAND_INITIALIZER)
7497 return simplify_gen_subreg (mode, op0, inner_mode,
7498 subreg_lowpart_offset (mode,
7499 inner_mode));
7500 else
7501 return convert_modes (mode, inner_mode, op0,
7502 TREE_UNSIGNED (inner_type));
7505 if (modifier == EXPAND_INITIALIZER)
7506 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7508 if (target == 0)
7509 return
7510 convert_to_mode (mode, op0,
7511 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7512 else
7513 convert_move (target, op0,
7514 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7515 return target;
7517 case VIEW_CONVERT_EXPR:
7518 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7520 /* If the input and output modes are both the same, we are done.
7521 Otherwise, if neither mode is BLKmode and both are within a word, we
7522 can use gen_lowpart. If neither is true, make sure the operand is
7523 in memory and convert the MEM to the new mode. */
7524 if (TYPE_MODE (type) == GET_MODE (op0))
7526 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7527 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7528 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7529 op0 = gen_lowpart (TYPE_MODE (type), op0);
7530 else if (GET_CODE (op0) != MEM)
7532 /* If the operand is not a MEM, force it into memory. Since we
7533 are going to be changing the mode of the MEM, don't call
7534 force_const_mem for constants because we don't allow pool
7535 constants to change mode. */
7536 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7538 if (TREE_ADDRESSABLE (exp))
7539 abort ();
7541 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7542 target
7543 = assign_stack_temp_for_type
7544 (TYPE_MODE (inner_type),
7545 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7547 emit_move_insn (target, op0);
7548 op0 = target;
7551 /* At this point, OP0 is in the correct mode. If the output type is such
7552 that the operand is known to be aligned, indicate that it is.
7553 Otherwise, we need only be concerned about alignment for non-BLKmode
7554 results. */
7555 if (GET_CODE (op0) == MEM)
7557 op0 = copy_rtx (op0);
7559 if (TYPE_ALIGN_OK (type))
7560 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7561 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7562 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7564 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7565 HOST_WIDE_INT temp_size
7566 = MAX (int_size_in_bytes (inner_type),
7567 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7568 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7569 temp_size, 0, type);
7570 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7572 if (TREE_ADDRESSABLE (exp))
7573 abort ();
7575 if (GET_MODE (op0) == BLKmode)
7576 emit_block_move (new_with_op0_mode, op0,
7577 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7578 else
7579 emit_move_insn (new_with_op0_mode, op0);
7581 op0 = new;
7584 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7587 return op0;
7589 case PLUS_EXPR:
7590 /* We come here from MINUS_EXPR when the second operand is a
7591 constant. */
7592 plus_expr:
7593 this_optab = ! unsignedp && flag_trapv
7594 && (GET_MODE_CLASS (mode) == MODE_INT)
7595 ? addv_optab : add_optab;
7597 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7598 something else, make sure we add the register to the constant and
7599 then to the other thing. This case can occur during strength
7600 reduction and doing it this way will produce better code if the
7601 frame pointer or argument pointer is eliminated.
7603 fold-const.c will ensure that the constant is always in the inner
7604 PLUS_EXPR, so the only case we need to do anything about is if
7605 sp, ap, or fp is our second argument, in which case we must swap
7606 the innermost first argument and our second argument. */
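/* Illustrative example: (X + 8) + FP is rearranged here to (FP + 8) + X,
   which keeps the register and the constant together and so produces
   better code when the frame or argument pointer is later eliminated.  */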
7608 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7609 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7610 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7611 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7612 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7613 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7615 tree t = TREE_OPERAND (exp, 1);
7617 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7618 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7621 /* If the result is to be ptr_mode and we are adding an integer to
7622 something, we might be forming a constant. So try to use
7623 plus_constant. If it produces a sum and we can't accept it,
7624 use force_operand. This allows P = &ARR[const] to generate
7625 efficient code on machines where a SYMBOL_REF is not a valid
7626 address.
7628 If this is an EXPAND_SUM call, always return the sum. */
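/* Illustrative example: for P = &ARR[3] with 4-byte elements, the constant
   part is folded by plus_constant into a single
   (const (plus (symbol_ref ARR) (const_int 12))) address instead of an
   explicit add instruction.  */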
7629 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7630 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7632 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7633 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7634 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7636 rtx constant_part;
7638 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7639 EXPAND_SUM);
7640 /* Use immed_double_const to ensure that the constant is
7641 truncated according to the mode of OP1, then sign extended
7642 to a HOST_WIDE_INT. Using the constant directly can result
7643 in non-canonical RTL in a 64x32 cross compile. */
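/* E.g. on a 64-bit host compiling for a 32-bit target, an SImode operand
   with all bits set must be represented as (const_int -1); using the tree
   constant directly could yield the non-canonical (const_int 0xffffffff).  */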
7644 constant_part
7645 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7646 (HOST_WIDE_INT) 0,
7647 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7648 op1 = plus_constant (op1, INTVAL (constant_part));
7649 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7650 op1 = force_operand (op1, target);
7651 return op1;
7654 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7655 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7656 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7658 rtx constant_part;
7660 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7661 (modifier == EXPAND_INITIALIZER
7662 ? EXPAND_INITIALIZER : EXPAND_SUM));
7663 if (! CONSTANT_P (op0))
7665 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7666 VOIDmode, modifier);
7667 /* Don't go to both_summands if modifier
7668 says it's not right to return a PLUS. */
7669 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7670 goto binop2;
7671 goto both_summands;
7673 /* Use immed_double_const to ensure that the constant is
7674 truncated according to the mode of OP0, then sign extended
7675 to a HOST_WIDE_INT. Using the constant directly can result
7676 in non-canonical RTL in a 64x32 cross compile. */
7677 constant_part
7678 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7679 (HOST_WIDE_INT) 0,
7680 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7681 op0 = plus_constant (op0, INTVAL (constant_part));
7682 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7683 op0 = force_operand (op0, target);
7684 return op0;
7688 /* No sense saving up arithmetic to be done
7689 if it's all in the wrong mode to form part of an address.
7690 And force_operand won't know whether to sign-extend or
7691 zero-extend. */
7692 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7693 || mode != ptr_mode)
7694 goto binop;
7696 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7697 subtarget = 0;
7699 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7700 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7702 both_summands:
7703 /* Make sure any term that's a sum with a constant comes last. */
7704 if (GET_CODE (op0) == PLUS
7705 && CONSTANT_P (XEXP (op0, 1)))
7707 temp = op0;
7708 op0 = op1;
7709 op1 = temp;
7711 /* If adding to a sum including a constant,
7712 associate it to put the constant outside. */
7713 if (GET_CODE (op1) == PLUS
7714 && CONSTANT_P (XEXP (op1, 1)))
7716 rtx constant_term = const0_rtx;
7718 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7719 if (temp != 0)
7720 op0 = temp;
7721 /* Ensure that MULT comes first if there is one. */
7722 else if (GET_CODE (op0) == MULT)
7723 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7724 else
7725 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7727 /* Let's also eliminate constants from op0 if possible. */
7728 op0 = eliminate_constant_term (op0, &constant_term);
7730 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7731 their sum should be a constant. Form it into OP1, since the
7732 result we want will then be OP0 + OP1. */
7734 temp = simplify_binary_operation (PLUS, mode, constant_term,
7735 XEXP (op1, 1));
7736 if (temp != 0)
7737 op1 = temp;
7738 else
7739 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7742 /* Put a constant term last and put a multiplication first. */
7743 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7744 temp = op1, op1 = op0, op0 = temp;
7746 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7747 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7749 case MINUS_EXPR:
7750 /* For initializers, we are allowed to return a MINUS of two
7751 symbolic constants. Here we handle all cases when both operands
7752 are constant. */
7753 /* Handle difference of two symbolic constants,
7754 for the sake of an initializer. */
7755 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7756 && really_constant_p (TREE_OPERAND (exp, 0))
7757 && really_constant_p (TREE_OPERAND (exp, 1)))
7759 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7760 modifier);
7761 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7762 modifier);
7764 /* If the last operand is a CONST_INT, use plus_constant of
7765 the negated constant. Else make the MINUS. */
7766 if (GET_CODE (op1) == CONST_INT)
7767 return plus_constant (op0, - INTVAL (op1));
7768 else
7769 return gen_rtx_MINUS (mode, op0, op1);
7771 /* Convert A - const to A + (-const). */
7772 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7774 tree negated = fold (build1 (NEGATE_EXPR, type,
7775 TREE_OPERAND (exp, 1)));
7777 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7778 /* If we can't negate the constant in TYPE, leave it alone and
7779 expand_binop will negate it for us. We used to try to do it
7780 here in the signed version of TYPE, but that doesn't work
7781 on POINTER_TYPEs. */;
7782 else
7784 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7785 goto plus_expr;
7788 this_optab = ! unsignedp && flag_trapv
7789 && (GET_MODE_CLASS(mode) == MODE_INT)
7790 ? subv_optab : sub_optab;
7791 goto binop;
7793 case MULT_EXPR:
7794 /* If first operand is constant, swap them.
7795 Thus the following special case checks need only
7796 check the second operand. */
7797 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7799 tree t1 = TREE_OPERAND (exp, 0);
7800 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7801 TREE_OPERAND (exp, 1) = t1;
7804 /* Attempt to return something suitable for generating an
7805 indexed address, for machines that support that. */
7807 if (modifier == EXPAND_SUM && mode == ptr_mode
7808 && host_integerp (TREE_OPERAND (exp, 1), 0))
7810 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7811 EXPAND_SUM);
7813 /* If we knew for certain that this is arithmetic for an array
7814 reference, and we knew the bounds of the array, then we could
7815 apply the distributive law across (PLUS X C) for constant C.
7816 Without such knowledge, we risk overflowing the computation
7817 when both X and C are large, but X+C isn't. */
7818 /* ??? Could perhaps special-case EXP being unsigned and C being
7819 positive. In that case we are certain that X+C is no smaller
7820 than X and so the transformed expression will overflow iff the
7821 original would have. */
7823 if (GET_CODE (op0) != REG)
7824 op0 = force_operand (op0, NULL_RTX);
7825 if (GET_CODE (op0) != REG)
7826 op0 = copy_to_mode_reg (mode, op0);
7828 return
7829 gen_rtx_MULT (mode, op0,
7830 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7833 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7834 subtarget = 0;
7836 /* Check for multiplying things that have been extended
7837 from a narrower type. If this machine supports multiplying
7838 in that narrower type with a result in the desired type,
7839 do it that way, and avoid the explicit type-conversion. */
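/* Illustrative example: (int) h1 * (int) h2, where h1 and h2 are 16-bit
   values, can use a widening HImode x HImode -> SImode multiply (such as
   a mulhisi3 pattern) instead of extending both operands to SImode first.  */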
7840 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7841 && TREE_CODE (type) == INTEGER_TYPE
7842 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7843 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7844 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7845 && int_fits_type_p (TREE_OPERAND (exp, 1),
7846 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7847 /* Don't use a widening multiply if a shift will do. */
7848 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7849 > HOST_BITS_PER_WIDE_INT)
7850 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7851 ||
7852 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7853 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7854 ==
7855 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7856 /* If both operands are extended, they must either both
7857 be zero-extended or both be sign-extended. */
7858 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7859 ==
7860 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7862 enum machine_mode innermode
7863 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7864 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7865 ? smul_widen_optab : umul_widen_optab);
7866 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7867 ? umul_widen_optab : smul_widen_optab);
7868 if (mode == GET_MODE_WIDER_MODE (innermode))
7870 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7872 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7873 NULL_RTX, VOIDmode, 0);
7874 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7875 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7876 VOIDmode, 0);
7877 else
7878 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7879 NULL_RTX, VOIDmode, 0);
7880 goto binop2;
7882 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7883 && innermode == word_mode)
7885 rtx htem;
7886 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7887 NULL_RTX, VOIDmode, 0);
7888 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7889 op1 = convert_modes (innermode, mode,
7890 expand_expr (TREE_OPERAND (exp, 1),
7891 NULL_RTX, VOIDmode, 0),
7892 unsignedp);
7893 else
7894 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7895 NULL_RTX, VOIDmode, 0);
7896 temp = expand_binop (mode, other_optab, op0, op1, target,
7897 unsignedp, OPTAB_LIB_WIDEN);
7898 htem = expand_mult_highpart_adjust (innermode,
7899 gen_highpart (innermode, temp),
7900 op0, op1,
7901 gen_highpart (innermode, temp),
7902 unsignedp);
7903 emit_move_insn (gen_highpart (innermode, temp), htem);
7904 return temp;
7908 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7909 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7910 return expand_mult (mode, op0, op1, target, unsignedp);
7912 case TRUNC_DIV_EXPR:
7913 case FLOOR_DIV_EXPR:
7914 case CEIL_DIV_EXPR:
7915 case ROUND_DIV_EXPR:
7916 case EXACT_DIV_EXPR:
7917 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7918 subtarget = 0;
7919 /* Possible optimization: compute the dividend with EXPAND_SUM;
7920 then, if the divisor is constant, we can optimize the case
7921 where some terms of the dividend have coefficients divisible by it. */
7922 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7923 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7924 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7926 case RDIV_EXPR:
7927 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7928 saving an expensive divide. If not, combine will rebuild the original
7929 computation. */
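/* E.g. x/d and y/d computed nearby can then share a single 1/d,
   turning two divides into one divide and two multiplies.  */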
7930 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7931 && TREE_CODE (type) == REAL_TYPE
7932 && !real_onep (TREE_OPERAND (exp, 0)))
7933 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7934 build (RDIV_EXPR, type,
7935 build_real (type, dconst1),
7936 TREE_OPERAND (exp, 1))),
7937 target, tmode, unsignedp);
7938 this_optab = sdiv_optab;
7939 goto binop;
7941 case TRUNC_MOD_EXPR:
7942 case FLOOR_MOD_EXPR:
7943 case CEIL_MOD_EXPR:
7944 case ROUND_MOD_EXPR:
7945 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7946 subtarget = 0;
7947 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7948 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7949 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7951 case FIX_ROUND_EXPR:
7952 case FIX_FLOOR_EXPR:
7953 case FIX_CEIL_EXPR:
7954 abort (); /* Not used for C. */
7956 case FIX_TRUNC_EXPR:
7957 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7958 if (target == 0)
7959 target = gen_reg_rtx (mode);
7960 expand_fix (target, op0, unsignedp);
7961 return target;
7963 case FLOAT_EXPR:
7964 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7965 if (target == 0)
7966 target = gen_reg_rtx (mode);
7967 /* expand_float can't figure out what to do if FROM has VOIDmode.
7968 So give it the correct mode. With -O, cse will optimize this. */
7969 if (GET_MODE (op0) == VOIDmode)
7970 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7971 op0);
7972 expand_float (target, op0,
7973 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7974 return target;
7976 case NEGATE_EXPR:
7977 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7978 temp = expand_unop (mode,
7979 ! unsignedp && flag_trapv
7980 && (GET_MODE_CLASS(mode) == MODE_INT)
7981 ? negv_optab : neg_optab, op0, target, 0);
7982 if (temp == 0)
7983 abort ();
7984 return temp;
7986 case ABS_EXPR:
7987 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7989 /* Handle complex values specially. */
7990 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7991 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7992 return expand_complex_abs (mode, op0, target, unsignedp);
7994 /* Unsigned abs is simply the operand. Testing here means we don't
7995 risk generating incorrect code below. */
7996 if (TREE_UNSIGNED (type))
7997 return op0;
7999 return expand_abs (mode, op0, target, unsignedp,
8000 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8002 case MAX_EXPR:
8003 case MIN_EXPR:
8004 target = original_target;
8005 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8006 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8007 || GET_MODE (target) != mode
8008 || (GET_CODE (target) == REG
8009 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8010 target = gen_reg_rtx (mode);
8011 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8012 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8014 /* First try to do it with a special MIN or MAX instruction.
8015 If that does not win, use a conditional jump to select the proper
8016 value. */
8017 this_optab = (TREE_UNSIGNED (type)
8018 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8019 : (code == MIN_EXPR ? smin_optab : smax_optab));
8021 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8022 OPTAB_WIDEN);
8023 if (temp != 0)
8024 return temp;
8026 /* At this point, a MEM target is no longer useful; we will get better
8027 code without it. */
8029 if (GET_CODE (target) == MEM)
8030 target = gen_reg_rtx (mode);
8032 if (target != op0)
8033 emit_move_insn (target, op0);
8035 op0 = gen_label_rtx ();
8037 /* If this mode is an integer too wide to compare properly,
8038 compare word by word. Rely on cse to optimize constant cases. */
8039 if (GET_MODE_CLASS (mode) == MODE_INT
8040 && ! can_compare_p (GE, mode, ccp_jump))
8042 if (code == MAX_EXPR)
8043 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8044 target, op1, NULL_RTX, op0);
8045 else
8046 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8047 op1, target, NULL_RTX, op0);
8049 else
8051 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8052 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8053 unsignedp, mode, NULL_RTX, NULL_RTX,
8054 op0);
8056 emit_move_insn (target, op1);
8057 emit_label (op0);
8058 return target;
8060 case BIT_NOT_EXPR:
8061 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8062 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8063 if (temp == 0)
8064 abort ();
8065 return temp;
8067 case FFS_EXPR:
8068 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8069 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8070 if (temp == 0)
8071 abort ();
8072 return temp;
8074 /* ??? Can optimize bitwise operations with one arg constant.
8075 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8076 and (a bitwise1 b) bitwise2 b (etc)
8077 but that is probably not worth while. */
8079 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8080 boolean values when we want in all cases to compute both of them. In
8081 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8082 as actual zero-or-1 values and then bitwise anding. In cases where
8083 there cannot be any side effects, better code would be made by
8084 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8085 how to recognize those cases. */
8087 case TRUTH_AND_EXPR:
8088 case BIT_AND_EXPR:
8089 this_optab = and_optab;
8090 goto binop;
8092 case TRUTH_OR_EXPR:
8093 case BIT_IOR_EXPR:
8094 this_optab = ior_optab;
8095 goto binop;
8097 case TRUTH_XOR_EXPR:
8098 case BIT_XOR_EXPR:
8099 this_optab = xor_optab;
8100 goto binop;
8102 case LSHIFT_EXPR:
8103 case RSHIFT_EXPR:
8104 case LROTATE_EXPR:
8105 case RROTATE_EXPR:
8106 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8107 subtarget = 0;
8108 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8109 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8110 unsignedp);
8112 /* Could determine the answer when only additive constants differ. Also,
8113 the addition of one can be handled by changing the condition. */
8114 case LT_EXPR:
8115 case LE_EXPR:
8116 case GT_EXPR:
8117 case GE_EXPR:
8118 case EQ_EXPR:
8119 case NE_EXPR:
8120 case UNORDERED_EXPR:
8121 case ORDERED_EXPR:
8122 case UNLT_EXPR:
8123 case UNLE_EXPR:
8124 case UNGT_EXPR:
8125 case UNGE_EXPR:
8126 case UNEQ_EXPR:
8127 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8128 if (temp != 0)
8129 return temp;
8131 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8132 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8133 && original_target
8134 && GET_CODE (original_target) == REG
8135 && (GET_MODE (original_target)
8136 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8138 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8139 VOIDmode, 0);
8141 /* If temp is constant, we can just compute the result. */
8142 if (GET_CODE (temp) == CONST_INT)
8144 if (INTVAL (temp) != 0)
8145 emit_move_insn (target, const1_rtx);
8146 else
8147 emit_move_insn (target, const0_rtx);
8149 return target;
8152 if (temp != original_target)
8154 enum machine_mode mode1 = GET_MODE (temp);
8155 if (mode1 == VOIDmode)
8156 mode1 = tmode != VOIDmode ? tmode : mode;
8158 temp = copy_to_mode_reg (mode1, temp);
8161 op1 = gen_label_rtx ();
8162 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8163 GET_MODE (temp), unsignedp, op1);
8164 emit_move_insn (temp, const1_rtx);
8165 emit_label (op1);
8166 return temp;
8169 /* If no set-flag instruction, must generate a conditional
8170 store into a temporary variable. Drop through
8171 and handle this like && and ||. */
8173 case TRUTH_ANDIF_EXPR:
8174 case TRUTH_ORIF_EXPR:
8175 if (! ignore
8176 && (target == 0 || ! safe_from_p (target, exp, 1)
8177 /* Make sure we don't have a hard reg (such as function's return
8178 value) live across basic blocks, if not optimizing. */
8179 || (!optimize && GET_CODE (target) == REG
8180 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8181 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8183 if (target)
8184 emit_clr_insn (target);
8186 op1 = gen_label_rtx ();
8187 jumpifnot (exp, op1);
8189 if (target)
8190 emit_0_to_1_insn (target);
8192 emit_label (op1);
8193 return ignore ? const0_rtx : target;
8195 case TRUTH_NOT_EXPR:
8196 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8197 /* The parser is careful to generate TRUTH_NOT_EXPR
8198 only with operands that are always zero or one. */
8199 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8200 target, 1, OPTAB_LIB_WIDEN);
8201 if (temp == 0)
8202 abort ();
8203 return temp;
8205 case COMPOUND_EXPR:
8206 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8207 emit_queue ();
8208 return expand_expr (TREE_OPERAND (exp, 1),
8209 (ignore ? const0_rtx : target),
8210 VOIDmode, 0);
8212 case COND_EXPR:
8213 /* If we would have a "singleton" (see below) were it not for a
8214 conversion in each arm, bring that conversion back out. */
8215 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8216 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8217 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8218 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8220 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8221 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8223 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8224 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8225 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8226 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8227 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8228 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8229 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8230 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8231 return expand_expr (build1 (NOP_EXPR, type,
8232 build (COND_EXPR, TREE_TYPE (iftrue),
8233 TREE_OPERAND (exp, 0),
8234 iftrue, iffalse)),
8235 target, tmode, modifier);
8239 /* Note that COND_EXPRs whose type is a structure or union
8240 are required to be constructed to contain assignments of
8241 a temporary variable, so that we can evaluate them here
8242 for side effect only. If type is void, we must do likewise. */
8244 /* If an arm of the branch requires a cleanup,
8245 only that cleanup is performed. */
8247 tree singleton = 0;
8248 tree binary_op = 0, unary_op = 0;
8250 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8251 convert it to our mode, if necessary. */
8252 if (integer_onep (TREE_OPERAND (exp, 1))
8253 && integer_zerop (TREE_OPERAND (exp, 2))
8254 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8256 if (ignore)
8258 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8259 modifier);
8260 return const0_rtx;
8263 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8264 if (GET_MODE (op0) == mode)
8265 return op0;
8267 if (target == 0)
8268 target = gen_reg_rtx (mode);
8269 convert_move (target, op0, unsignedp);
8270 return target;
8273 /* Check for X ? A + B : A. If we have this, we can copy A to the
8274 output and conditionally add B. Similarly for unary operations.
8275 Don't do this if X has side-effects because those side effects
8276 might affect A or B and the "?" operation is a sequence point in
8277 ANSI. (operand_equal_p tests for side effects.) */
8279 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8280 && operand_equal_p (TREE_OPERAND (exp, 2),
8281 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8282 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8283 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8284 && operand_equal_p (TREE_OPERAND (exp, 1),
8285 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8286 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8287 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8288 && operand_equal_p (TREE_OPERAND (exp, 2),
8289 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8290 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8291 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8292 && operand_equal_p (TREE_OPERAND (exp, 1),
8293 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8294 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8296 /* If we are not to produce a result, we have no target. Otherwise,
8297 if a target was specified use it; it will not be used as an
8298 intermediate target unless it is safe. If no target, use a
8299 temporary. */
8301 if (ignore)
8302 temp = 0;
8303 else if (original_target
8304 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8305 || (singleton && GET_CODE (original_target) == REG
8306 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8307 && original_target == var_rtx (singleton)))
8308 && GET_MODE (original_target) == mode
8309 #ifdef HAVE_conditional_move
8310 && (! can_conditionally_move_p (mode)
8311 || GET_CODE (original_target) == REG
8312 || TREE_ADDRESSABLE (type))
8313 #endif
8314 && (GET_CODE (original_target) != MEM
8315 || TREE_ADDRESSABLE (type)))
8316 temp = original_target;
8317 else if (TREE_ADDRESSABLE (type))
8318 abort ();
8319 else
8320 temp = assign_temp (type, 0, 0, 1);
8322 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8323 do the test of X as a store-flag operation, do this as
8324 A + ((X != 0) << log C). Similarly for other simple binary
8325 operators. Only do for C == 1 if BRANCH_COST is low. */
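/* Illustrative example: x ? a + 4 : a becomes a + ((x != 0) << 2), which
   avoids a conditional branch when the test can be done as a store-flag
   operation.  */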
8326 if (temp && singleton && binary_op
8327 && (TREE_CODE (binary_op) == PLUS_EXPR
8328 || TREE_CODE (binary_op) == MINUS_EXPR
8329 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8330 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8331 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8332 : integer_onep (TREE_OPERAND (binary_op, 1)))
8333 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8335 rtx result;
8336 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8337 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8338 ? addv_optab : add_optab)
8339 : TREE_CODE (binary_op) == MINUS_EXPR
8340 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8341 ? subv_optab : sub_optab)
8342 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8343 : xor_optab);
8345 /* If we had X ? A : A + 1, do this as A + (X == 0).
8347 We have to invert the truth value here and then put it
8348 back later if do_store_flag fails. We cannot simply copy
8349 TREE_OPERAND (exp, 0) to another variable and modify that
8350 because invert_truthvalue can modify the tree pointed to
8351 by its argument. */
8352 if (singleton == TREE_OPERAND (exp, 1))
8353 TREE_OPERAND (exp, 0)
8354 = invert_truthvalue (TREE_OPERAND (exp, 0));
8356 result = do_store_flag (TREE_OPERAND (exp, 0),
8357 (safe_from_p (temp, singleton, 1)
8358 ? temp : NULL_RTX),
8359 mode, BRANCH_COST <= 1);
8361 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8362 result = expand_shift (LSHIFT_EXPR, mode, result,
8363 build_int_2 (tree_log2
8364 (TREE_OPERAND
8365 (binary_op, 1)),
8366 0),
8367 (safe_from_p (temp, singleton, 1)
8368 ? temp : NULL_RTX), 0);
8370 if (result)
8372 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8373 return expand_binop (mode, boptab, op1, result, temp,
8374 unsignedp, OPTAB_LIB_WIDEN);
8376 else if (singleton == TREE_OPERAND (exp, 1))
8377 TREE_OPERAND (exp, 0)
8378 = invert_truthvalue (TREE_OPERAND (exp, 0));
8381 do_pending_stack_adjust ();
8382 NO_DEFER_POP;
8383 op0 = gen_label_rtx ();
8385 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8387 if (temp != 0)
8389 /* If the target conflicts with the other operand of the
8390 binary op, we can't use it. Also, we can't use the target
8391 if it is a hard register, because evaluating the condition
8392 might clobber it. */
8393 if ((binary_op
8394 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8395 || (GET_CODE (temp) == REG
8396 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8397 temp = gen_reg_rtx (mode);
8398 store_expr (singleton, temp, 0);
8400 else
8401 expand_expr (singleton,
8402 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8403 if (singleton == TREE_OPERAND (exp, 1))
8404 jumpif (TREE_OPERAND (exp, 0), op0);
8405 else
8406 jumpifnot (TREE_OPERAND (exp, 0), op0);
8408 start_cleanup_deferral ();
8409 if (binary_op && temp == 0)
8410 /* Just touch the other operand. */
8411 expand_expr (TREE_OPERAND (binary_op, 1),
8412 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8413 else if (binary_op)
8414 store_expr (build (TREE_CODE (binary_op), type,
8415 make_tree (type, temp),
8416 TREE_OPERAND (binary_op, 1)),
8417 temp, 0);
8418 else
8419 store_expr (build1 (TREE_CODE (unary_op), type,
8420 make_tree (type, temp)),
8421 temp, 0);
8422 op1 = op0;
8424 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8425 comparison operator. If we have one of these cases, set the
8426 output to A, branch on A (cse will merge these two references),
8427 then set the output to FOO. */
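/* Illustrative example: for N != 0 ? N : DEFAULT, the output is first set
   to N, the branch tests N (cse merges the two references to N), and only
   the fall-through path stores DEFAULT.  */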
8428 else if (temp
8429 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8430 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8431 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8432 TREE_OPERAND (exp, 1), 0)
8433 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8434 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8435 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8437 if (GET_CODE (temp) == REG
8438 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8439 temp = gen_reg_rtx (mode);
8440 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8441 jumpif (TREE_OPERAND (exp, 0), op0);
8443 start_cleanup_deferral ();
8444 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8445 op1 = op0;
8447 else if (temp
8448 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8449 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8450 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8451 TREE_OPERAND (exp, 2), 0)
8452 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8453 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8454 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8456 if (GET_CODE (temp) == REG
8457 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8458 temp = gen_reg_rtx (mode);
8459 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8460 jumpifnot (TREE_OPERAND (exp, 0), op0);
8462 start_cleanup_deferral ();
8463 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8464 op1 = op0;
8466 else
8468 op1 = gen_label_rtx ();
8469 jumpifnot (TREE_OPERAND (exp, 0), op0);
8471 start_cleanup_deferral ();
8473 /* One branch of the cond can be void, if it never returns. For
8474 example, A ? throw : E. */
8475 if (temp != 0
8476 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8477 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8478 else
8479 expand_expr (TREE_OPERAND (exp, 1),
8480 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8481 end_cleanup_deferral ();
8482 emit_queue ();
8483 emit_jump_insn (gen_jump (op1));
8484 emit_barrier ();
8485 emit_label (op0);
8486 start_cleanup_deferral ();
8487 if (temp != 0
8488 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8489 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8490 else
8491 expand_expr (TREE_OPERAND (exp, 2),
8492 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8495 end_cleanup_deferral ();
8497 emit_queue ();
8498 emit_label (op1);
8499 OK_DEFER_POP;
8501 return temp;
8504 case TARGET_EXPR:
8506 /* Something needs to be initialized, but we didn't know
8507 where that thing was when building the tree. For example,
8508 it could be the return value of a function, or a parameter
8509 to a function which is laid out on the stack, or a temporary
8510 variable which must be passed by reference.
8512 We guarantee that the expression will either be constructed
8513 or copied into our original target. */
8515 tree slot = TREE_OPERAND (exp, 0);
8516 tree cleanups = NULL_TREE;
8517 tree exp1;
8519 if (TREE_CODE (slot) != VAR_DECL)
8520 abort ();
8522 if (! ignore)
8523 target = original_target;
8525 /* Set this here so that if we get a target that refers to a
8526 register variable that's already been used, put_reg_into_stack
8527 knows that it should fix up those uses. */
8528 TREE_USED (slot) = 1;
8530 if (target == 0)
8532 if (DECL_RTL_SET_P (slot))
8534 target = DECL_RTL (slot);
8535 /* If we have already expanded the slot, don't do
8536 it again. (mrs) */
8537 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8538 return target;
8540 else
8542 target = assign_temp (type, 2, 0, 1);
8543 /* All temp slots at this level must not conflict. */
8544 preserve_temp_slots (target);
8545 SET_DECL_RTL (slot, target);
8546 if (TREE_ADDRESSABLE (slot))
8547 put_var_into_stack (slot);
8549 /* Since SLOT is not known to the called function
8550 to belong to its stack frame, we must build an explicit
8551 cleanup. This case occurs when we must build up a reference
8552 to pass the reference as an argument. In this case,
8553 it is very likely that such a reference need not be
8554 built here. */
8556 if (TREE_OPERAND (exp, 2) == 0)
8557 TREE_OPERAND (exp, 2)
8558 = (*lang_hooks.maybe_build_cleanup) (slot);
8559 cleanups = TREE_OPERAND (exp, 2);
8562 else
8564 /* This case does occur, when expanding a parameter which
8565 needs to be constructed on the stack. The target
8566 is the actual stack address that we want to initialize.
8567 The function we call will perform the cleanup in this case. */
8569 /* If we have already assigned it space, use that space,
8570 not the target that we were passed in, as our target
8571 parameter is only a hint. */
8572 if (DECL_RTL_SET_P (slot))
8574 target = DECL_RTL (slot);
8575 /* If we have already expanded the slot, don't do
8576 it again. (mrs) */
8577 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8578 return target;
8580 else
8582 SET_DECL_RTL (slot, target);
8583 /* If we must have an addressable slot, then make sure that
8584 the RTL that we just stored in slot is OK. */
8585 if (TREE_ADDRESSABLE (slot))
8586 put_var_into_stack (slot);
8590 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8591 /* Mark it as expanded. */
8592 TREE_OPERAND (exp, 1) = NULL_TREE;
8594 store_expr (exp1, target, 0);
8596 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8598 return target;
8601 case INIT_EXPR:
8603 tree lhs = TREE_OPERAND (exp, 0);
8604 tree rhs = TREE_OPERAND (exp, 1);
8606 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8607 return temp;
8610 case MODIFY_EXPR:
8612 /* If lhs is complex, expand calls in rhs before computing it.
8613 That's so we don't compute a pointer and save it over a
8614 call. If lhs is simple, compute it first so we can give it
8615 as a target if the rhs is just a call. This avoids an
8616 extra temp and copy and that prevents a partial-subsumption
8617 which makes bad code. Actually we could treat
8618 component_ref's of vars like vars. */
8620 tree lhs = TREE_OPERAND (exp, 0);
8621 tree rhs = TREE_OPERAND (exp, 1);
8623 temp = 0;
8625 /* Check for |= or &= of a bitfield of size one into another bitfield
8626 of size 1. In this case, (unless we need the result of the
8627 assignment) we can do this more efficiently with a
8628 test followed by an assignment, if necessary.
8630 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8631 things change so we do, this code should be enhanced to
8632 support it. */
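/* Illustrative example: for one-bit fields, A.x |= B.y is emitted as
   "if (B.y) A.x = 1;", avoiding a read-modify-write of A.x, and
   A.x &= B.y likewise becomes "if (! B.y) A.x = 0;".  */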
8633 if (ignore
8634 && TREE_CODE (lhs) == COMPONENT_REF
8635 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8636 || TREE_CODE (rhs) == BIT_AND_EXPR)
8637 && TREE_OPERAND (rhs, 0) == lhs
8638 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8639 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8640 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8642 rtx label = gen_label_rtx ();
8644 do_jump (TREE_OPERAND (rhs, 1),
8645 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8646 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8647 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8648 (TREE_CODE (rhs) == BIT_IOR_EXPR
8649 ? integer_one_node
8650 : integer_zero_node)),
8651 0, 0);
8652 do_pending_stack_adjust ();
8653 emit_label (label);
8654 return const0_rtx;
8657 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8659 return temp;
8662 case RETURN_EXPR:
8663 if (!TREE_OPERAND (exp, 0))
8664 expand_null_return ();
8665 else
8666 expand_return (TREE_OPERAND (exp, 0));
8667 return const0_rtx;
8669 case PREINCREMENT_EXPR:
8670 case PREDECREMENT_EXPR:
8671 return expand_increment (exp, 0, ignore);
8673 case POSTINCREMENT_EXPR:
8674 case POSTDECREMENT_EXPR:
8675 /* Faster to treat as pre-increment if result is not used. */
8676 return expand_increment (exp, ! ignore, ignore);
8678 case ADDR_EXPR:
8679 /* Are we taking the address of a nested function? */
8680 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8681 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8682 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8683 && ! TREE_STATIC (exp))
8685 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8686 op0 = force_operand (op0, target);
8688 /* If we are taking the address of something erroneous, just
8689 return a zero. */
8690 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8691 return const0_rtx;
8692 /* If we are taking the address of a constant and are at the
8693 top level, we have to use output_constant_def since we can't
8694 call force_const_mem at top level. */
8695 else if (cfun == 0
8696 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8697 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8698 == 'c')))
8699 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8700 else
8702 /* We make sure to pass const0_rtx down if we came in with
8703 ignore set, to avoid doing the cleanups twice for something. */
8704 op0 = expand_expr (TREE_OPERAND (exp, 0),
8705 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8706 (modifier == EXPAND_INITIALIZER
8707 ? modifier : EXPAND_CONST_ADDRESS));
8709 /* If we are going to ignore the result, OP0 will have been set
8710 to const0_rtx, so just return it. Don't get confused and
8711 think we are taking the address of the constant. */
8712 if (ignore)
8713 return op0;
8715 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8716 clever and return a REG when given a MEM. */
8717 op0 = protect_from_queue (op0, 1);
8719 /* We would like the object in memory. If it is a constant, we can
8720 have it be statically allocated into memory. For a non-constant,
8721 we need to allocate some memory and store the value into it. */
8723 if (CONSTANT_P (op0))
8724 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8725 op0);
8726 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8727 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8728 || GET_CODE (op0) == PARALLEL)
8730 /* If the operand is a SAVE_EXPR, we can deal with this by
8731 forcing the SAVE_EXPR into memory. */
8732 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8734 put_var_into_stack (TREE_OPERAND (exp, 0));
8735 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8737 else
8739 /* If this object is in a register, it can't be BLKmode. */
8740 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8741 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8743 if (GET_CODE (op0) == PARALLEL)
8744 /* Handle calls that pass values in multiple
8745 non-contiguous locations. The Irix 6 ABI has examples
8746 of this. */
8747 emit_group_store (memloc, op0,
8748 int_size_in_bytes (inner_type));
8749 else
8750 emit_move_insn (memloc, op0);
8752 op0 = memloc;
8756 if (GET_CODE (op0) != MEM)
8757 abort ();
8759 mark_temp_addr_taken (op0);
8760 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8762 op0 = XEXP (op0, 0);
8763 #ifdef POINTERS_EXTEND_UNSIGNED
8764 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8765 && mode == ptr_mode)
8766 op0 = convert_memory_address (ptr_mode, op0);
8767 #endif
8768 return op0;
8771 /* If OP0 is not aligned at least as much as the type requires, we
8772 need to make a temporary, copy OP0 to it, and take the address of
8773 the temporary. We want to use the alignment of the type, not of
8774 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8775 the test for BLKmode means that can't happen. The test for
8776 BLKmode is because we never make mis-aligned MEMs with
8777 non-BLKmode.
8779 We don't need to do this at all if the machine doesn't have
8780 strict alignment. */
8781 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8782 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8783 > MEM_ALIGN (op0))
8784 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8786 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8787 rtx new
8788 = assign_stack_temp_for_type
8789 (TYPE_MODE (inner_type),
8790 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8791 : int_size_in_bytes (inner_type),
8792 1, build_qualified_type (inner_type,
8793 (TYPE_QUALS (inner_type)
8794 | TYPE_QUAL_CONST)));
8796 if (TYPE_ALIGN_OK (inner_type))
8797 abort ();
8799 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8800 op0 = new;
8803 op0 = force_operand (XEXP (op0, 0), target);
8806 if (flag_force_addr
8807 && GET_CODE (op0) != REG
8808 && modifier != EXPAND_CONST_ADDRESS
8809 && modifier != EXPAND_INITIALIZER
8810 && modifier != EXPAND_SUM)
8811 op0 = force_reg (Pmode, op0);
8813 if (GET_CODE (op0) == REG
8814 && ! REG_USERVAR_P (op0))
8815 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8817 #ifdef POINTERS_EXTEND_UNSIGNED
8818 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8819 && mode == ptr_mode)
8820 op0 = convert_memory_address (ptr_mode, op0);
8821 #endif
8823 return op0;
8825 case ENTRY_VALUE_EXPR:
8826 abort ();
8828 /* COMPLEX type for Extended Pascal & Fortran */
8829 case COMPLEX_EXPR:
8831 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8832 rtx insns;
8834 /* Expand the operands into their rtx form. */
8835 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8836 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8838 if (! target)
8839 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8841 start_sequence ();
8843 /* Move the real (op0) and imaginary (op1) parts to their location. */
8844 emit_move_insn (gen_realpart (mode, target), op0);
8845 emit_move_insn (gen_imagpart (mode, target), op1);
8847 insns = get_insns ();
8848 end_sequence ();
8850 /* Complex construction should appear as a single unit. */
8851 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8852 each with a separate pseudo as destination.
8853 It's not correct for flow to treat them as a unit. */
8854 if (GET_CODE (target) != CONCAT)
8855 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8856 else
8857 emit_insns (insns);
8859 return target;
8862 case REALPART_EXPR:
8863 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8864 return gen_realpart (mode, op0);
8866 case IMAGPART_EXPR:
8867 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8868 return gen_imagpart (mode, op0);
8870 case CONJ_EXPR:
8872 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8873 rtx imag_t;
8874 rtx insns;
8876 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8878 if (! target)
8879 target = gen_reg_rtx (mode);
8881 start_sequence ();
8883 /* Store the realpart and the negated imagpart to target. */
8884 emit_move_insn (gen_realpart (partmode, target),
8885 gen_realpart (partmode, op0));
8887 imag_t = gen_imagpart (partmode, target);
8888 temp = expand_unop (partmode,
8889 ! unsignedp && flag_trapv
8890 && (GET_MODE_CLASS(partmode) == MODE_INT)
8891 ? negv_optab : neg_optab,
8892 gen_imagpart (partmode, op0), imag_t, 0);
8893 if (temp != imag_t)
8894 emit_move_insn (imag_t, temp);
8896 insns = get_insns ();
8897 end_sequence ();
8899 /* Conjugate should appear as a single unit.
8900 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8901 each with a separate pseudo as destination.
8902 It's not correct for flow to treat them as a unit. */
8903 if (GET_CODE (target) != CONCAT)
8904 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8905 else
8906 emit_insns (insns);
8908 return target;
8911 case TRY_CATCH_EXPR:
8913 tree handler = TREE_OPERAND (exp, 1);
8915 expand_eh_region_start ();
8917 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8919 expand_eh_region_end_cleanup (handler);
8921 return op0;
8924 case TRY_FINALLY_EXPR:
8926 tree try_block = TREE_OPERAND (exp, 0);
8927 tree finally_block = TREE_OPERAND (exp, 1);
8928 rtx finally_label = gen_label_rtx ();
8929 rtx done_label = gen_label_rtx ();
8930 rtx return_link = gen_reg_rtx (Pmode);
8931 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8932 (tree) finally_label, (tree) return_link);
8933 TREE_SIDE_EFFECTS (cleanup) = 1;
8935 /* Start a new binding layer that will keep track of all cleanup
8936 actions to be performed. */
8937 expand_start_bindings (2);
8939 target_temp_slot_level = temp_slot_level;
8941 expand_decl_cleanup (NULL_TREE, cleanup);
8942 op0 = expand_expr (try_block, target, tmode, modifier);
8944 preserve_temp_slots (op0);
8945 expand_end_bindings (NULL_TREE, 0, 0);
8946 emit_jump (done_label);
8947 emit_label (finally_label);
8948 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8949 emit_indirect_jump (return_link);
8950 emit_label (done_label);
8951 return op0;
8954 case GOTO_SUBROUTINE_EXPR:
8956 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8957 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8958 rtx return_address = gen_label_rtx ();
8959 emit_move_insn (return_link,
8960 gen_rtx_LABEL_REF (Pmode, return_address));
8961 emit_jump (subr);
8962 emit_label (return_address);
8963 return const0_rtx;
8966 case VA_ARG_EXPR:
8967 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8969 case EXC_PTR_EXPR:
8970 return get_exception_pointer (cfun);
8972 case FDESC_EXPR:
8973 /* Function descriptors are not valid except as
8974 initialization constants, and should not be expanded. */
8975 abort ();
8977 default:
8978 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
8981 /* Here to do an ordinary binary operator, generating an instruction
8982 from the optab already placed in `this_optab'. */
8983 binop:
8984 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8985 subtarget = 0;
8986 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8987 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8988 binop2:
8989 temp = expand_binop (mode, this_optab, op0, op1, target,
8990 unsignedp, OPTAB_LIB_WIDEN);
8991 if (temp == 0)
8992 abort ();
8993 return temp;
8996 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8997 when applied to the address of EXP produces an address known to be
8998 aligned more than BIGGEST_ALIGNMENT. */
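/* Illustrative example: an OFFSET of the form (-ADDR) & (N - 1), where ADDR
   is the address of EXP and N is a power of 2 larger than BIGGEST_ALIGNMENT,
   rounds ADDR up to the next N-byte boundary, so ADDR + OFFSET is known to
   be N-byte aligned.  */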
9000 static int
9001 is_aligning_offset (offset, exp)
9002 tree offset;
9003 tree exp;
9005 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9006 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9007 || TREE_CODE (offset) == NOP_EXPR
9008 || TREE_CODE (offset) == CONVERT_EXPR
9009 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9010 offset = TREE_OPERAND (offset, 0);
9012 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9013 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9014 if (TREE_CODE (offset) != BIT_AND_EXPR
9015 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9016 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9017 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9018 return 0;
9020 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9021 It must be NEGATE_EXPR. Then strip any more conversions. */
9022 offset = TREE_OPERAND (offset, 0);
9023 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9024 || TREE_CODE (offset) == NOP_EXPR
9025 || TREE_CODE (offset) == CONVERT_EXPR)
9026 offset = TREE_OPERAND (offset, 0);
9028 if (TREE_CODE (offset) != NEGATE_EXPR)
9029 return 0;
9031 offset = TREE_OPERAND (offset, 0);
9032 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9033 || TREE_CODE (offset) == NOP_EXPR
9034 || TREE_CODE (offset) == CONVERT_EXPR)
9035 offset = TREE_OPERAND (offset, 0);
9037 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9038 whose type is the same as EXP. */
9039 return (TREE_CODE (offset) == ADDR_EXPR
9040 && (TREE_OPERAND (offset, 0) == exp
9041 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9042 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9043 == TREE_TYPE (exp)))));
9046 /* Return the tree node if ARG corresponds to a string constant, or zero
9047 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9048 in bytes within the string that ARG is accessing. The type of the
9049 offset will be `sizetype'. */
9051 tree
9052 string_constant (arg, ptr_offset)
9053 tree arg;
9054 tree *ptr_offset;
9056 STRIP_NOPS (arg);
9058 if (TREE_CODE (arg) == ADDR_EXPR
9059 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9061 *ptr_offset = size_zero_node;
9062 return TREE_OPERAND (arg, 0);
9064 else if (TREE_CODE (arg) == PLUS_EXPR)
9066 tree arg0 = TREE_OPERAND (arg, 0);
9067 tree arg1 = TREE_OPERAND (arg, 1);
9069 STRIP_NOPS (arg0);
9070 STRIP_NOPS (arg1);
9072 if (TREE_CODE (arg0) == ADDR_EXPR
9073 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9075 *ptr_offset = convert (sizetype, arg1);
9076 return TREE_OPERAND (arg0, 0);
9078 else if (TREE_CODE (arg1) == ADDR_EXPR
9079 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9081 *ptr_offset = convert (sizetype, arg0);
9082 return TREE_OPERAND (arg1, 0);
9086 return 0;
9089 /* Expand code for a post- or pre- increment or decrement
9090 and return the RTX for the result.
9091 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9093 static rtx
9094 expand_increment (exp, post, ignore)
9095 tree exp;
9096 int post, ignore;
9098 rtx op0, op1;
9099 rtx temp, value;
9100 tree incremented = TREE_OPERAND (exp, 0);
9101 optab this_optab = add_optab;
9102 int icode;
9103 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9104 int op0_is_copy = 0;
9105 int single_insn = 0;
9106 /* 1 means we can't store into OP0 directly,
9107 because it is a subreg narrower than a word,
9108 and we don't dare clobber the rest of the word. */
9109 int bad_subreg = 0;
9111 /* Stabilize any component ref that might need to be
9112 evaluated more than once below. */
9113 if (!post
9114 || TREE_CODE (incremented) == BIT_FIELD_REF
9115 || (TREE_CODE (incremented) == COMPONENT_REF
9116 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9117 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9118 incremented = stabilize_reference (incremented);
9119 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9120 ones into save exprs so that they don't accidentally get evaluated
9121 more than once by the code below. */
9122 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9123 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9124 incremented = save_expr (incremented);
9126 /* Compute the operands as RTX.
9127 Note whether OP0 is the actual lvalue or a copy of it:
9128 I believe it is a copy iff it is a register or subreg
9129 and insns were generated in computing it. */
9131 temp = get_last_insn ();
9132 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9134 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9135 in place but instead must do sign- or zero-extension during assignment,
9136 so we copy it into a new register and let the code below use it as
9137 a copy.
9139 Note that we can safely modify this SUBREG since it is known not to be
9140 shared (it was made by the expand_expr call above). */
9142 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9144 if (post)
9145 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9146 else
9147 bad_subreg = 1;
9149 else if (GET_CODE (op0) == SUBREG
9150 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9152 /* We cannot increment this SUBREG in place. If we are
9153 post-incrementing, get a copy of the old value. Otherwise,
9154 just mark that we cannot increment in place. */
9155 if (post)
9156 op0 = copy_to_reg (op0);
9157 else
9158 bad_subreg = 1;
9161 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9162 && temp != get_last_insn ());
9163 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9165 /* Decide whether incrementing or decrementing. */
9166 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9167 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9168 this_optab = sub_optab;
9170 /* Convert decrement by a constant into a negative increment. */
9171 if (this_optab == sub_optab
9172 && GET_CODE (op1) == CONST_INT)
9174 op1 = GEN_INT (-INTVAL (op1));
9175 this_optab = add_optab;
9178 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9179 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9181 /* For a preincrement, see if we can do this with a single instruction. */
9182 if (!post)
9184 icode = (int) this_optab->handlers[(int) mode].insn_code;
9185 if (icode != (int) CODE_FOR_nothing
9186 /* Make sure that OP0 is valid for operands 0 and 1
9187 of the insn we want to queue. */
9188 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9189 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9190 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9191 single_insn = 1;
9194 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9195 then we cannot just increment OP0. We must therefore contrive to
9196 increment the original value. Then, for postincrement, we can return
9197 OP0 since it is a copy of the old value. For preincrement, expand here
9198 unless we can do it with a single insn.
9200 Likewise if storing directly into OP0 would clobber high bits
9201 we need to preserve (bad_subreg). */
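/* Illustrative example, not part of the original comment: for a
   preincrement `++a.b' that cannot be done in place, the code below simply
   expands the equivalent assignment `a.b = a.b + 1' and returns its value;
   for a postincrement it returns OP0, which on this path already holds a
   copy of the old value.  */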
9202 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9204 /* This is the easiest way to increment the value wherever it is.
9205 Problems with multiple evaluation of INCREMENTED are prevented
9206 because either (1) it is a component_ref or preincrement,
9207 in which case it was stabilized above, or (2) it is an array_ref
9208 with constant index in an array in a register, which is
9209 safe to reevaluate. */
9210 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9211 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9212 ? MINUS_EXPR : PLUS_EXPR),
9213 TREE_TYPE (exp),
9214 incremented,
9215 TREE_OPERAND (exp, 1));
9217 while (TREE_CODE (incremented) == NOP_EXPR
9218 || TREE_CODE (incremented) == CONVERT_EXPR)
9220 newexp = convert (TREE_TYPE (incremented), newexp);
9221 incremented = TREE_OPERAND (incremented, 0);
9224 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9225 return post ? op0 : temp;
9228 if (post)
9230 /* We have a true reference to the value in OP0.
9231 If there is an insn to add or subtract in this mode, queue it.
9232 Queueing the increment insn avoids the register shuffling
9233 that often results if we must increment now and first save
9234 the old value for subsequent use. */
9236 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9237 op0 = stabilize (op0);
9238 #endif
9240 icode = (int) this_optab->handlers[(int) mode].insn_code;
9241 if (icode != (int) CODE_FOR_nothing
9242 /* Make sure that OP0 is valid for operands 0 and 1
9243 of the insn we want to queue. */
9244 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9245 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9247 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9248 op1 = force_reg (mode, op1);
9250 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9252 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9254 rtx addr = (general_operand (XEXP (op0, 0), mode)
9255 ? force_reg (Pmode, XEXP (op0, 0))
9256 : copy_to_reg (XEXP (op0, 0)));
9257 rtx temp, result;
9259 op0 = replace_equiv_address (op0, addr);
9260 temp = force_reg (GET_MODE (op0), op0);
9261 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9262 op1 = force_reg (mode, op1);
9264 /* The increment queue is LIFO, thus we have to `queue'
9265 the instructions in reverse order. */
9266 enqueue_insn (op0, gen_move_insn (op0, temp));
9267 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9268 return result;
9272 /* Preincrement, or we can't increment with one simple insn. */
9273 if (post)
9274 /* Save a copy of the value before inc or dec, to return it later. */
9275 temp = value = copy_to_reg (op0);
9276 else
9277 /* Arrange to return the incremented value. */
9278 /* Copy the rtx because expand_binop will protect from the queue,
9279 and the results of that would be invalid for us to return
9280 if our caller does emit_queue before using our result. */
9281 temp = copy_rtx (value = op0);
9283 /* Increment however we can. */
9284 op1 = expand_binop (mode, this_optab, value, op1, op0,
9285 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9287 /* Make sure the value is stored into OP0. */
9288 if (op1 != op0)
9289 emit_move_insn (op0, op1);
9291 return temp;
9294 /* At the start of a function, record that we have no previously-pushed
9295 arguments waiting to be popped. */
9297 void
9298 init_pending_stack_adjust ()
9300 pending_stack_adjust = 0;
9303 /* When exiting from function, if safe, clear out any pending stack adjust
9304 so the adjustment won't get done.
9306 Note, if the current function calls alloca, then it must have a
9307 frame pointer regardless of the value of flag_omit_frame_pointer. */
9309 void
9310 clear_pending_stack_adjust ()
9312 #ifdef EXIT_IGNORE_STACK
9313 if (optimize > 0
9314 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9315 && EXIT_IGNORE_STACK
9316 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9317 && ! flag_inline_functions)
9319 stack_pointer_delta -= pending_stack_adjust,
9320 pending_stack_adjust = 0;
9322 #endif
9325 /* Pop any previously-pushed arguments that have not been popped yet. */
9327 void
9328 do_pending_stack_adjust ()
9330 if (inhibit_defer_pop == 0)
9332 if (pending_stack_adjust != 0)
9333 adjust_stack (GEN_INT (pending_stack_adjust));
9334 pending_stack_adjust = 0;
9338 /* Expand conditional expressions. */
9340 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9341 LABEL is an rtx of code CODE_LABEL, in this function and all the
9342 functions here. */
9344 void
9345 jumpifnot (exp, label)
9346 tree exp;
9347 rtx label;
9349 do_jump (exp, label, NULL_RTX);
9352 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9354 void
9355 jumpif (exp, label)
9356 tree exp;
9357 rtx label;
9359 do_jump (exp, NULL_RTX, label);
9362 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9363 the result is zero, or IF_TRUE_LABEL if the result is one.
9364 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9365 meaning fall through in that case.
9367 do_jump always does any pending stack adjust except when it does not
9368 actually perform a jump. An example where there is no jump
9369 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9371 This function is responsible for optimizing cases such as
9372 &&, || and comparison operators in EXP. */
9374 void
9375 do_jump (exp, if_false_label, if_true_label)
9376 tree exp;
9377 rtx if_false_label, if_true_label;
9379 enum tree_code code = TREE_CODE (exp);
9380 /* Some cases need to create a label to jump to
9381 in order to properly fall through.
9382 These cases set DROP_THROUGH_LABEL nonzero. */
9383 rtx drop_through_label = 0;
9384 rtx temp;
9385 int i;
9386 tree type;
9387 enum machine_mode mode;
9389 #ifdef MAX_INTEGER_COMPUTATION_MODE
9390 check_max_integer_computation_mode (exp);
9391 #endif
9393 emit_queue ();
9395 switch (code)
9397 case ERROR_MARK:
9398 break;
9400 case INTEGER_CST:
9401 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9402 if (temp)
9403 emit_jump (temp);
9404 break;
9406 #if 0
9407 /* This is not true with #pragma weak */
9408 case ADDR_EXPR:
9409 /* The address of something can never be zero. */
9410 if (if_true_label)
9411 emit_jump (if_true_label);
9412 break;
9413 #endif
9415 case NOP_EXPR:
9416 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9417 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9418 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9419 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9420 goto normal;
9421 case CONVERT_EXPR:
9422 /* If we are narrowing the operand, we have to do the compare in the
9423 narrower mode. */
9424 if ((TYPE_PRECISION (TREE_TYPE (exp))
9425 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9426 goto normal;
9427 case NON_LVALUE_EXPR:
9428 case REFERENCE_EXPR:
9429 case ABS_EXPR:
9430 case NEGATE_EXPR:
9431 case LROTATE_EXPR:
9432 case RROTATE_EXPR:
9433 /* These cannot change zero->non-zero or vice versa. */
9434 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9435 break;
9437 case WITH_RECORD_EXPR:
9438 /* Put the object on the placeholder list, recurse through our first
9439 operand, and pop the list. */
9440 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9441 placeholder_list);
9442 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9443 placeholder_list = TREE_CHAIN (placeholder_list);
9444 break;
9446 #if 0
9447 /* This is never less insns than evaluating the PLUS_EXPR followed by
9448 a test and can be longer if the test is eliminated. */
9449 case PLUS_EXPR:
9450 /* Reduce to minus. */
9451 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9452 TREE_OPERAND (exp, 0),
9453 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9454 TREE_OPERAND (exp, 1))));
9455 /* Process as MINUS. */
9456 #endif
9458 case MINUS_EXPR:
9459 /* Non-zero iff operands of minus differ. */
9460 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9461 TREE_OPERAND (exp, 0),
9462 TREE_OPERAND (exp, 1)),
9463 NE, NE, if_false_label, if_true_label);
9464 break;
9466 case BIT_AND_EXPR:
9467 /* If we are AND'ing with a small constant, do this comparison in the
9468 smallest type that fits. If the machine doesn't have comparisons
9469 that small, it will be converted back to the wider comparison.
9470 This helps if we are testing the sign bit of a narrower object.
9471 combine can't do this for us because it can't know whether a
9472 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
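/* Illustrative example, not part of the original comment: for `if (x & 0x80)'
   with X a 32-bit int, only the low byte matters, so the test can be done as
   the QImode comparison `(unsigned char) (x & 0x80) != 0' when the target has
   a byte-sized compare.  */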
9474 if (! SLOW_BYTE_ACCESS
9475 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9476 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9477 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9478 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9479 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9480 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9481 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9482 != CODE_FOR_nothing))
9484 do_jump (convert (type, exp), if_false_label, if_true_label);
9485 break;
9487 goto normal;
9489 case TRUTH_NOT_EXPR:
9490 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9491 break;
9493 case TRUTH_ANDIF_EXPR:
9494 if (if_false_label == 0)
9495 if_false_label = drop_through_label = gen_label_rtx ();
9496 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9497 start_cleanup_deferral ();
9498 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9499 end_cleanup_deferral ();
9500 break;
9502 case TRUTH_ORIF_EXPR:
9503 if (if_true_label == 0)
9504 if_true_label = drop_through_label = gen_label_rtx ();
9505 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9506 start_cleanup_deferral ();
9507 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9508 end_cleanup_deferral ();
9509 break;
9511 case COMPOUND_EXPR:
9512 push_temp_slots ();
9513 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9514 preserve_temp_slots (NULL_RTX);
9515 free_temp_slots ();
9516 pop_temp_slots ();
9517 emit_queue ();
9518 do_pending_stack_adjust ();
9519 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9520 break;
9522 case COMPONENT_REF:
9523 case BIT_FIELD_REF:
9524 case ARRAY_REF:
9525 case ARRAY_RANGE_REF:
9527 HOST_WIDE_INT bitsize, bitpos;
9528 int unsignedp;
9529 enum machine_mode mode;
9530 tree type;
9531 tree offset;
9532 int volatilep = 0;
9534 /* Get description of this reference. We don't actually care
9535 about the underlying object here. */
9536 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9537 &unsignedp, &volatilep);
9539 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9540 if (! SLOW_BYTE_ACCESS
9541 && type != 0 && bitsize >= 0
9542 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9543 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9544 != CODE_FOR_nothing))
9546 do_jump (convert (type, exp), if_false_label, if_true_label);
9547 break;
9549 goto normal;
9552 case COND_EXPR:
9553 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9554 if (integer_onep (TREE_OPERAND (exp, 1))
9555 && integer_zerop (TREE_OPERAND (exp, 2)))
9556 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9558 else if (integer_zerop (TREE_OPERAND (exp, 1))
9559 && integer_onep (TREE_OPERAND (exp, 2)))
9560 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9562 else
9564 rtx label1 = gen_label_rtx ();
9565 drop_through_label = gen_label_rtx ();
9567 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9569 start_cleanup_deferral ();
9570 /* Now the THEN-expression. */
9571 do_jump (TREE_OPERAND (exp, 1),
9572 if_false_label ? if_false_label : drop_through_label,
9573 if_true_label ? if_true_label : drop_through_label);
9574 /* In case the do_jump just above never jumps. */
9575 do_pending_stack_adjust ();
9576 emit_label (label1);
9578 /* Now the ELSE-expression. */
9579 do_jump (TREE_OPERAND (exp, 2),
9580 if_false_label ? if_false_label : drop_through_label,
9581 if_true_label ? if_true_label : drop_through_label);
9582 end_cleanup_deferral ();
9584 break;
9586 case EQ_EXPR:
9588 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9590 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9591 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9593 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9594 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9595 do_jump
9596 (fold
9597 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9598 fold (build (EQ_EXPR, TREE_TYPE (exp),
9599 fold (build1 (REALPART_EXPR,
9600 TREE_TYPE (inner_type),
9601 exp0)),
9602 fold (build1 (REALPART_EXPR,
9603 TREE_TYPE (inner_type),
9604 exp1)))),
9605 fold (build (EQ_EXPR, TREE_TYPE (exp),
9606 fold (build1 (IMAGPART_EXPR,
9607 TREE_TYPE (inner_type),
9608 exp0)),
9609 fold (build1 (IMAGPART_EXPR,
9610 TREE_TYPE (inner_type),
9611 exp1)))))),
9612 if_false_label, if_true_label);
9615 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9616 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9618 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9619 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9620 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9621 else
9622 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9623 break;
9626 case NE_EXPR:
9628 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9630 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9631 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9633 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9634 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9635 do_jump
9636 (fold
9637 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9638 fold (build (NE_EXPR, TREE_TYPE (exp),
9639 fold (build1 (REALPART_EXPR,
9640 TREE_TYPE (inner_type),
9641 exp0)),
9642 fold (build1 (REALPART_EXPR,
9643 TREE_TYPE (inner_type),
9644 exp1)))),
9645 fold (build (NE_EXPR, TREE_TYPE (exp),
9646 fold (build1 (IMAGPART_EXPR,
9647 TREE_TYPE (inner_type),
9648 exp0)),
9649 fold (build1 (IMAGPART_EXPR,
9650 TREE_TYPE (inner_type),
9651 exp1)))))),
9652 if_false_label, if_true_label);
9655 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9656 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9658 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9659 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9660 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9661 else
9662 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9663 break;
9666 case LT_EXPR:
9667 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9668 if (GET_MODE_CLASS (mode) == MODE_INT
9669 && ! can_compare_p (LT, mode, ccp_jump))
9670 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9671 else
9672 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9673 break;
9675 case LE_EXPR:
9676 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9677 if (GET_MODE_CLASS (mode) == MODE_INT
9678 && ! can_compare_p (LE, mode, ccp_jump))
9679 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9680 else
9681 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9682 break;
9684 case GT_EXPR:
9685 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9686 if (GET_MODE_CLASS (mode) == MODE_INT
9687 && ! can_compare_p (GT, mode, ccp_jump))
9688 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9689 else
9690 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9691 break;
9693 case GE_EXPR:
9694 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9695 if (GET_MODE_CLASS (mode) == MODE_INT
9696 && ! can_compare_p (GE, mode, ccp_jump))
9697 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9698 else
9699 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9700 break;
9702 case UNORDERED_EXPR:
9703 case ORDERED_EXPR:
9705 enum rtx_code cmp, rcmp;
9706 int do_rev;
9708 if (code == UNORDERED_EXPR)
9709 cmp = UNORDERED, rcmp = ORDERED;
9710 else
9711 cmp = ORDERED, rcmp = UNORDERED;
9712 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9714 do_rev = 0;
9715 if (! can_compare_p (cmp, mode, ccp_jump)
9716 && (can_compare_p (rcmp, mode, ccp_jump)
9717 /* If the target doesn't provide either UNORDERED or ORDERED
9718 comparisons, canonicalize on UNORDERED for the library. */
9719 || rcmp == UNORDERED))
9720 do_rev = 1;
9722 if (! do_rev)
9723 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9724 else
9725 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9727 break;
9730 enum rtx_code rcode1;
9731 enum tree_code tcode2;
9733 case UNLT_EXPR:
9734 rcode1 = UNLT;
9735 tcode2 = LT_EXPR;
9736 goto unordered_bcc;
9737 case UNLE_EXPR:
9738 rcode1 = UNLE;
9739 tcode2 = LE_EXPR;
9740 goto unordered_bcc;
9741 case UNGT_EXPR:
9742 rcode1 = UNGT;
9743 tcode2 = GT_EXPR;
9744 goto unordered_bcc;
9745 case UNGE_EXPR:
9746 rcode1 = UNGE;
9747 tcode2 = GE_EXPR;
9748 goto unordered_bcc;
9749 case UNEQ_EXPR:
9750 rcode1 = UNEQ;
9751 tcode2 = EQ_EXPR;
9752 goto unordered_bcc;
9754 unordered_bcc:
9755 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9756 if (can_compare_p (rcode1, mode, ccp_jump))
9757 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9758 if_true_label);
9759 else
9761 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9762 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9763 tree cmp0, cmp1;
9765 /* If the target doesn't support combined unordered
9766 compares, decompose into UNORDERED + comparison. */
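/* Illustrative example, not part of the original comment: UNLT (a, b)
   decomposes into the equivalent of

       __builtin_isunordered (a, b) || a < b

   so the branch is taken either when a NaN makes the operands unordered
   or when A compares less than B.  */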
9767 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9768 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9769 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9770 do_jump (exp, if_false_label, if_true_label);
9773 break;
9775 /* Special case:
9776 __builtin_expect (<test>, 0) and
9777 __builtin_expect (<test>, 1)
9779 We need to do this here, so that <test> is not converted to an SCC
9780 operation on machines that use condition code registers and COMPARE
9781 like the PowerPC, and then the jump is done based on whether the SCC
9782 operation produced a 1 or 0. */
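/* Illustrative example, not from the original source: in

       if (__builtin_expect (x == 0, 0))
	 rare_case ();

   we want the conditional jump to test `x == 0' directly, annotated as
   unlikely, rather than first producing a 0/1 value with an SCC/COMPARE
   sequence and then jumping on that value.  The function name rare_case
   is hypothetical.  */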
9783 case CALL_EXPR:
9784 /* Check for a built-in function. */
9785 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9787 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9788 tree arglist = TREE_OPERAND (exp, 1);
9790 if (TREE_CODE (fndecl) == FUNCTION_DECL
9791 && DECL_BUILT_IN (fndecl)
9792 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9793 && arglist != NULL_TREE
9794 && TREE_CHAIN (arglist) != NULL_TREE)
9796 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9797 if_true_label);
9799 if (seq != NULL_RTX)
9801 emit_insn (seq);
9802 return;
9806 /* fall through and generate the normal code. */
9808 default:
9809 normal:
9810 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9811 #if 0
9812 /* This is not needed any more and causes poor code since it causes
9813 comparisons and tests from non-SI objects to have different code
9814 sequences. */
9815 /* Copy to register to avoid generating bad insns by cse
9816 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9817 if (!cse_not_expected && GET_CODE (temp) == MEM)
9818 temp = copy_to_reg (temp);
9819 #endif
9820 do_pending_stack_adjust ();
9821 /* Do any postincrements in the expression that was tested. */
9822 emit_queue ();
9824 if (GET_CODE (temp) == CONST_INT
9825 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9826 || GET_CODE (temp) == LABEL_REF)
9828 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9829 if (target)
9830 emit_jump (target);
9832 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9833 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9834 /* Note swapping the labels gives us not-equal. */
9835 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9836 else if (GET_MODE (temp) != VOIDmode)
9837 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9838 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9839 GET_MODE (temp), NULL_RTX,
9840 if_false_label, if_true_label);
9841 else
9842 abort ();
9845 if (drop_through_label)
9847 /* If do_jump produces code that might be jumped around,
9848 do any stack adjusts from that code, before the place
9849 where control merges in. */
9850 do_pending_stack_adjust ();
9851 emit_label (drop_through_label);
9855 /* Given a comparison expression EXP for values too wide to be compared
9856 with one insn, test the comparison and jump to the appropriate label.
9857 The code of EXP is ignored; we always test GT if SWAP is 0,
9858 and LT if SWAP is 1. */
9860 static void
9861 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9862 tree exp;
9863 int swap;
9864 rtx if_false_label, if_true_label;
9866 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9867 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9868 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9869 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9871 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9874 /* Compare OP0 with OP1, word at a time, in mode MODE.
9875 UNSIGNEDP says to do unsigned comparison.
9876 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9878 void
9879 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9880 enum machine_mode mode;
9881 int unsignedp;
9882 rtx op0, op1;
9883 rtx if_false_label, if_true_label;
9885 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9886 rtx drop_through_label = 0;
9887 int i;
9889 if (! if_true_label || ! if_false_label)
9890 drop_through_label = gen_label_rtx ();
9891 if (! if_true_label)
9892 if_true_label = drop_through_label;
9893 if (! if_false_label)
9894 if_false_label = drop_through_label;
9896 /* Compare a word at a time, high order first. */
9897 for (i = 0; i < nwords; i++)
9899 rtx op0_word, op1_word;
9901 if (WORDS_BIG_ENDIAN)
9903 op0_word = operand_subword_force (op0, i, mode);
9904 op1_word = operand_subword_force (op1, i, mode);
9906 else
9908 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9909 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9912 /* All but the high-order word must be compared as unsigned. */
9913 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9914 (unsignedp || i > 0), word_mode, NULL_RTX,
9915 NULL_RTX, if_true_label);
9917 /* Consider lower words only if these are equal. */
9918 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9919 NULL_RTX, NULL_RTX, if_false_label);
9922 if (if_false_label)
9923 emit_jump (if_false_label);
9924 if (drop_through_label)
9925 emit_label (drop_through_label);
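/* Illustrative sketch, not part of the original file: the word-at-a-time
   greater-than test emitted above, written as plain C over arrays of words
   with the high-order word first.  Only the high-order word is compared
   with the signedness of the original comparison; lower words are always
   compared as unsigned.  The function name and parameters are
   hypothetical.  */
#if 0
static int
multiword_gt (const unsigned long *op0, const unsigned long *op1,
              int nwords, int unsignedp)
{
  int i;

  for (i = 0; i < nwords; i++)
    {
      if (i == 0 && ! unsignedp)
        {
          if ((long) op0[i] > (long) op1[i])
            return 1;           /* corresponds to jumping to IF_TRUE_LABEL */
        }
      else if (op0[i] > op1[i])
        return 1;               /* corresponds to jumping to IF_TRUE_LABEL */

      if (op0[i] != op1[i])
        return 0;               /* corresponds to jumping to IF_FALSE_LABEL */
    }

  return 0;                     /* all words equal, so not greater */
}
#endif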
9928 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9929 with one insn, test the comparison and jump to the appropriate label. */
9931 static void
9932 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9933 tree exp;
9934 rtx if_false_label, if_true_label;
9936 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9937 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9938 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9939 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9940 int i;
9941 rtx drop_through_label = 0;
9943 if (! if_false_label)
9944 drop_through_label = if_false_label = gen_label_rtx ();
9946 for (i = 0; i < nwords; i++)
9947 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9948 operand_subword_force (op1, i, mode),
9949 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9950 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9952 if (if_true_label)
9953 emit_jump (if_true_label);
9954 if (drop_through_label)
9955 emit_label (drop_through_label);
9958 /* Jump according to whether OP0 is 0.
9959 We assume that OP0 has an integer mode that is too wide
9960 for the available compare insns. */
9962 void
9963 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9964 rtx op0;
9965 rtx if_false_label, if_true_label;
9967 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9968 rtx part;
9969 int i;
9970 rtx drop_through_label = 0;
9972 /* The fastest way of doing this comparison on almost any machine is to
9973 "or" all the words and compare the result. If all have to be loaded
9974 from memory and this is a very wide item, it's possible this may
9975 be slower, but that's highly unlikely. */
9977 part = gen_reg_rtx (word_mode);
9978 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9979 for (i = 1; i < nwords && part != 0; i++)
9980 part = expand_binop (word_mode, ior_optab, part,
9981 operand_subword_force (op0, i, GET_MODE (op0)),
9982 part, 1, OPTAB_WIDEN);
9984 if (part != 0)
9986 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9987 NULL_RTX, if_false_label, if_true_label);
9989 return;
9992 /* If we couldn't do the "or" simply, do this with a series of compares. */
9993 if (! if_false_label)
9994 drop_through_label = if_false_label = gen_label_rtx ();
9996 for (i = 0; i < nwords; i++)
9997 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9998 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9999 if_false_label, NULL_RTX);
10001 if (if_true_label)
10002 emit_jump (if_true_label);
10004 if (drop_through_label)
10005 emit_label (drop_through_label);
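/* Illustrative sketch, not part of the original file: the "or all the words
   and compare the result once" zero test used above, as plain C.  The name
   and parameters are hypothetical.  */
#if 0
static int
multiword_is_zero (const unsigned long *op0, int nwords)
{
  unsigned long acc = op0[0];
  int i;

  for (i = 1; i < nwords; i++)
    acc |= op0[i];

  return acc == 0;              /* one compare instead of NWORDS compares */
}
#endif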
10008 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10009 (including code to compute the values to be compared)
10010 and set (CC0) according to the result.
10011 The decision as to signed or unsigned comparison must be made by the caller.
10013 We force a stack adjustment unless there are currently
10014 things pushed on the stack that aren't yet used.
10016 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10017 compared. */
10019 rtx
10020 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
10021 rtx op0, op1;
10022 enum rtx_code code;
10023 int unsignedp;
10024 enum machine_mode mode;
10025 rtx size;
10027 rtx tem;
10029 /* If one operand is constant, make it the second one. Only do this
10030 if the other operand is not constant as well. */
10032 if (swap_commutative_operands_p (op0, op1))
10034 tem = op0;
10035 op0 = op1;
10036 op1 = tem;
10037 code = swap_condition (code);
10040 if (flag_force_mem)
10042 op0 = force_not_mem (op0);
10043 op1 = force_not_mem (op1);
10046 do_pending_stack_adjust ();
10048 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10049 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10050 return tem;
10052 #if 0
10053 /* There's no need to do this now that combine.c can eliminate lots of
10054 sign extensions. This can be less efficient in certain cases on other
10055 machines. */
10057 /* If this is a signed equality comparison, we can do it as an
10058 unsigned comparison since zero-extension is cheaper than sign
10059 extension and comparisons with zero are done as unsigned. This is
10060 the case even on machines that can do fast sign extension, since
10061 zero-extension is easier to combine with other operations than
10062 sign-extension is. If we are comparing against a constant, we must
10063 convert it to what it would look like unsigned. */
10064 if ((code == EQ || code == NE) && ! unsignedp
10065 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10067 if (GET_CODE (op1) == CONST_INT
10068 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10069 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10070 unsignedp = 1;
10072 #endif
10074 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10076 #if HAVE_cc0
10077 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10078 #else
10079 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
10080 #endif
10083 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10084 The decision as to signed or unsigned comparison must be made by the caller.
10086 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10087 compared. */
10089 void
10090 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10091 if_false_label, if_true_label)
10092 rtx op0, op1;
10093 enum rtx_code code;
10094 int unsignedp;
10095 enum machine_mode mode;
10096 rtx size;
10097 rtx if_false_label, if_true_label;
10099 rtx tem;
10100 int dummy_true_label = 0;
10102 /* Reverse the comparison if that is safe and we want to jump if it is
10103 false. */
10104 if (! if_true_label && ! FLOAT_MODE_P (mode))
10106 if_true_label = if_false_label;
10107 if_false_label = 0;
10108 code = reverse_condition (code);
10111 /* If one operand is constant, make it the second one. Only do this
10112 if the other operand is not constant as well. */
10114 if (swap_commutative_operands_p (op0, op1))
10116 tem = op0;
10117 op0 = op1;
10118 op1 = tem;
10119 code = swap_condition (code);
10122 if (flag_force_mem)
10124 op0 = force_not_mem (op0);
10125 op1 = force_not_mem (op1);
10128 do_pending_stack_adjust ();
10130 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10131 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10133 if (tem == const_true_rtx)
10135 if (if_true_label)
10136 emit_jump (if_true_label);
10138 else
10140 if (if_false_label)
10141 emit_jump (if_false_label);
10143 return;
10146 #if 0
10147 /* There's no need to do this now that combine.c can eliminate lots of
10148 sign extensions. This can be less efficient in certain cases on other
10149 machines. */
10151 /* If this is a signed equality comparison, we can do it as an
10152 unsigned comparison since zero-extension is cheaper than sign
10153 extension and comparisons with zero are done as unsigned. This is
10154 the case even on machines that can do fast sign extension, since
10155 zero-extension is easier to combine with other operations than
10156 sign-extension is. If we are comparing against a constant, we must
10157 convert it to what it would look like unsigned. */
10158 if ((code == EQ || code == NE) && ! unsignedp
10159 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10161 if (GET_CODE (op1) == CONST_INT
10162 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10163 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10164 unsignedp = 1;
10166 #endif
10168 if (! if_true_label)
10170 dummy_true_label = 1;
10171 if_true_label = gen_label_rtx ();
10174 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10175 if_true_label);
10177 if (if_false_label)
10178 emit_jump (if_false_label);
10179 if (dummy_true_label)
10180 emit_label (if_true_label);
10183 /* Generate code for a comparison expression EXP (including code to compute
10184 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10185 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10186 generated code will drop through.
10187 SIGNED_CODE should be the rtx operation for this comparison for
10188 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10190 We force a stack adjustment unless there are currently
10191 things pushed on the stack that aren't yet used. */
10193 static void
10194 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10195 if_true_label)
10196 tree exp;
10197 enum rtx_code signed_code, unsigned_code;
10198 rtx if_false_label, if_true_label;
10200 rtx op0, op1;
10201 tree type;
10202 enum machine_mode mode;
10203 int unsignedp;
10204 enum rtx_code code;
10206 /* Don't crash if the comparison was erroneous. */
10207 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10208 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10209 return;
10211 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10212 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10213 return;
10215 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10216 mode = TYPE_MODE (type);
10217 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10218 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10219 || (GET_MODE_BITSIZE (mode)
10220 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10221 1)))))))
10223 /* op0 might have been replaced by a promoted constant, in which
10224 case the type of the second argument should be used. */
10225 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10226 mode = TYPE_MODE (type);
10228 unsignedp = TREE_UNSIGNED (type);
10229 code = unsignedp ? unsigned_code : signed_code;
10231 #ifdef HAVE_canonicalize_funcptr_for_compare
10232 /* If function pointers need to be "canonicalized" before they can
10233 be reliably compared, then canonicalize them. */
10234 if (HAVE_canonicalize_funcptr_for_compare
10235 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10236 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10237 == FUNCTION_TYPE))
10239 rtx new_op0 = gen_reg_rtx (mode);
10241 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10242 op0 = new_op0;
10245 if (HAVE_canonicalize_funcptr_for_compare
10246 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10247 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10248 == FUNCTION_TYPE))
10250 rtx new_op1 = gen_reg_rtx (mode);
10252 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10253 op1 = new_op1;
10255 #endif
10257 /* Do any postincrements in the expression that was tested. */
10258 emit_queue ();
10260 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10261 ((mode == BLKmode)
10262 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10263 if_false_label, if_true_label);
10266 /* Generate code to calculate EXP using a store-flag instruction
10267 and return an rtx for the result. EXP is either a comparison
10268 or a TRUTH_NOT_EXPR whose operand is a comparison.
10270 If TARGET is nonzero, store the result there if convenient.
10272 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10273 cheap.
10275 Return zero if there is no suitable set-flag instruction
10276 available on this machine.
10278 Once expand_expr has been called on the arguments of the comparison,
10279 we are committed to doing the store flag, since it is not safe to
10280 re-evaluate the expression. We emit the store-flag insn by calling
10281 emit_store_flag, but only expand the arguments if we have a reason
10282 to believe that emit_store_flag will be successful. If we think that
10283 it will, but it isn't, we have to simulate the store-flag with a
10284 set/jump/set sequence. */
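/* Illustrative sketch, not part of the original comment: the set/jump/set
   fallback at the end of this function is equivalent to

       target = 1;
       if (op0 <cond> op1)
	 goto done;
       target = 0;
     done:;

   with the two constants swapped when the result must be inverted.  */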
10286 static rtx
10287 do_store_flag (exp, target, mode, only_cheap)
10288 tree exp;
10289 rtx target;
10290 enum machine_mode mode;
10291 int only_cheap;
10293 enum rtx_code code;
10294 tree arg0, arg1, type;
10295 tree tem;
10296 enum machine_mode operand_mode;
10297 int invert = 0;
10298 int unsignedp;
10299 rtx op0, op1;
10300 enum insn_code icode;
10301 rtx subtarget = target;
10302 rtx result, label;
10304 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10305 result at the end. We can't simply invert the test since it would
10306 have already been inverted if it were valid. This case occurs for
10307 some floating-point comparisons. */
10309 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10310 invert = 1, exp = TREE_OPERAND (exp, 0);
10312 arg0 = TREE_OPERAND (exp, 0);
10313 arg1 = TREE_OPERAND (exp, 1);
10315 /* Don't crash if the comparison was erroneous. */
10316 if (arg0 == error_mark_node || arg1 == error_mark_node)
10317 return const0_rtx;
10319 type = TREE_TYPE (arg0);
10320 operand_mode = TYPE_MODE (type);
10321 unsignedp = TREE_UNSIGNED (type);
10323 /* We won't bother with BLKmode store-flag operations because it would mean
10324 passing a lot of information to emit_store_flag. */
10325 if (operand_mode == BLKmode)
10326 return 0;
10328 /* We won't bother with store-flag operations involving function pointers
10329 when function pointers must be canonicalized before comparisons. */
10330 #ifdef HAVE_canonicalize_funcptr_for_compare
10331 if (HAVE_canonicalize_funcptr_for_compare
10332 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10333 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10334 == FUNCTION_TYPE))
10335 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10336 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10337 == FUNCTION_TYPE))))
10338 return 0;
10339 #endif
10341 STRIP_NOPS (arg0);
10342 STRIP_NOPS (arg1);
10344 /* Get the rtx comparison code to use. We know that EXP is a comparison
10345 operation of some type. Some comparisons against 1 and -1 can be
10346 converted to comparisons with zero. Do so here so that the tests
10347 below will be aware that we have a comparison with zero. These
10348 tests will not catch constants in the first operand, but constants
10349 are rarely passed as the first operand. */
10351 switch (TREE_CODE (exp))
10353 case EQ_EXPR:
10354 code = EQ;
10355 break;
10356 case NE_EXPR:
10357 code = NE;
10358 break;
10359 case LT_EXPR:
10360 if (integer_onep (arg1))
10361 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10362 else
10363 code = unsignedp ? LTU : LT;
10364 break;
10365 case LE_EXPR:
10366 if (! unsignedp && integer_all_onesp (arg1))
10367 arg1 = integer_zero_node, code = LT;
10368 else
10369 code = unsignedp ? LEU : LE;
10370 break;
10371 case GT_EXPR:
10372 if (! unsignedp && integer_all_onesp (arg1))
10373 arg1 = integer_zero_node, code = GE;
10374 else
10375 code = unsignedp ? GTU : GT;
10376 break;
10377 case GE_EXPR:
10378 if (integer_onep (arg1))
10379 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10380 else
10381 code = unsignedp ? GEU : GE;
10382 break;
10384 case UNORDERED_EXPR:
10385 code = UNORDERED;
10386 break;
10387 case ORDERED_EXPR:
10388 code = ORDERED;
10389 break;
10390 case UNLT_EXPR:
10391 code = UNLT;
10392 break;
10393 case UNLE_EXPR:
10394 code = UNLE;
10395 break;
10396 case UNGT_EXPR:
10397 code = UNGT;
10398 break;
10399 case UNGE_EXPR:
10400 code = UNGE;
10401 break;
10402 case UNEQ_EXPR:
10403 code = UNEQ;
10404 break;
10406 default:
10407 abort ();
10410 /* Put a constant second. */
10411 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10413 tem = arg0; arg0 = arg1; arg1 = tem;
10414 code = swap_condition (code);
10417 /* If this is an equality or inequality test of a single bit, we can
10418 do this by shifting the bit being tested to the low-order bit and
10419 masking the result with the constant 1. If the condition was EQ,
10420 we xor it with 1. This does not require an scc insn and is faster
10421 than an scc insn even if we have it. */
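/* Illustrative example, not part of the original comment: for the source
   test `(x & 0x20) != 0' the fast path below computes roughly
   `(x >> 5) & 1', and for `(x & 0x20) == 0' it computes
   `((x >> 5) & 1) ^ 1'; the trailing `& 1' is omitted when the tested bit
   is already the sign bit.  */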
10423 if ((code == NE || code == EQ)
10424 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10425 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10427 tree inner = TREE_OPERAND (arg0, 0);
10428 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10429 int ops_unsignedp;
10431 /* If INNER is a right shift of a constant and it plus BITNUM does
10432 not overflow, adjust BITNUM and INNER. */
10434 if (TREE_CODE (inner) == RSHIFT_EXPR
10435 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10436 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10437 && bitnum < TYPE_PRECISION (type)
10438 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10439 bitnum - TYPE_PRECISION (type)))
10441 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10442 inner = TREE_OPERAND (inner, 0);
10445 /* If we are going to be able to omit the AND below, we must do our
10446 operations as unsigned. If we must use the AND, we have a choice.
10447 Normally unsigned is faster, but for some machines signed is. */
10448 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10449 #ifdef LOAD_EXTEND_OP
10450 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10451 #else
10453 #endif
10456 if (! get_subtarget (subtarget)
10457 || GET_MODE (subtarget) != operand_mode
10458 || ! safe_from_p (subtarget, inner, 1))
10459 subtarget = 0;
10461 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10463 if (bitnum != 0)
10464 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10465 size_int (bitnum), subtarget, ops_unsignedp);
10467 if (GET_MODE (op0) != mode)
10468 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10470 if ((code == EQ && ! invert) || (code == NE && invert))
10471 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10472 ops_unsignedp, OPTAB_LIB_WIDEN);
10474 /* Put the AND last so it can combine with more things. */
10475 if (bitnum != TYPE_PRECISION (type) - 1)
10476 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10478 return op0;
10481 /* Now see if we are likely to be able to do this. Return if not. */
10482 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10483 return 0;
10485 icode = setcc_gen_code[(int) code];
10486 if (icode == CODE_FOR_nothing
10487 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10489 /* We can only do this if it is one of the special cases that
10490 can be handled without an scc insn. */
10491 if ((code == LT && integer_zerop (arg1))
10492 || (! only_cheap && code == GE && integer_zerop (arg1)))
10494 else if (BRANCH_COST >= 0
10495 && ! only_cheap && (code == NE || code == EQ)
10496 && TREE_CODE (type) != REAL_TYPE
10497 && ((abs_optab->handlers[(int) operand_mode].insn_code
10498 != CODE_FOR_nothing)
10499 || (ffs_optab->handlers[(int) operand_mode].insn_code
10500 != CODE_FOR_nothing)))
10502 else
10503 return 0;
10506 if (! get_subtarget (target)
10507 || GET_MODE (subtarget) != operand_mode
10508 || ! safe_from_p (subtarget, arg1, 1))
10509 subtarget = 0;
10511 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10512 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10514 if (target == 0)
10515 target = gen_reg_rtx (mode);
10517 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10518 because, if emit_store_flag does anything, it will succeed and
10519 OP0 and OP1 will not be used subsequently. */
10521 result = emit_store_flag (target, code,
10522 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10523 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10524 operand_mode, unsignedp, 1);
10526 if (result)
10528 if (invert)
10529 result = expand_binop (mode, xor_optab, result, const1_rtx,
10530 result, 0, OPTAB_LIB_WIDEN);
10531 return result;
10534 /* If this failed, we have to do this with set/compare/jump/set code. */
10535 if (GET_CODE (target) != REG
10536 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10537 target = gen_reg_rtx (GET_MODE (target));
10539 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10540 result = compare_from_rtx (op0, op1, code, unsignedp,
10541 operand_mode, NULL_RTX);
10542 if (GET_CODE (result) == CONST_INT)
10543 return (((result == const0_rtx && ! invert)
10544 || (result != const0_rtx && invert))
10545 ? const0_rtx : const1_rtx);
10547 /* The code of RESULT may not match CODE if compare_from_rtx
10548 decided to swap its operands and reverse the original code.
10550 We know that compare_from_rtx returns either a CONST_INT or
10551 a new comparison code, so it is safe to just extract the
10552 code from RESULT. */
10553 code = GET_CODE (result);
10555 label = gen_label_rtx ();
10556 if (bcc_gen_fctn[(int) code] == 0)
10557 abort ();
10559 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10560 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10561 emit_label (label);
10563 return target;
10567 /* Stubs in case we haven't got a casesi insn. */
10568 #ifndef HAVE_casesi
10569 # define HAVE_casesi 0
10570 # define gen_casesi(a, b, c, d, e) (0)
10571 # define CODE_FOR_casesi CODE_FOR_nothing
10572 #endif
10574 /* If the machine does not have a case insn that compares the bounds,
10575 this means extra overhead for dispatch tables, which raises the
10576 threshold for using them. */
10577 #ifndef CASE_VALUES_THRESHOLD
10578 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10579 #endif /* CASE_VALUES_THRESHOLD */
10581 unsigned int
10582 case_values_threshold ()
10584 return CASE_VALUES_THRESHOLD;
10587 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10588 0 otherwise (i.e. if there is no casesi instruction). */
10589 int
10590 try_casesi (index_type, index_expr, minval, range,
10591 table_label, default_label)
10592 tree index_type, index_expr, minval, range;
10593 rtx table_label ATTRIBUTE_UNUSED;
10594 rtx default_label;
10596 enum machine_mode index_mode = SImode;
10597 int index_bits = GET_MODE_BITSIZE (index_mode);
10598 rtx op1, op2, index;
10599 enum machine_mode op_mode;
10601 if (! HAVE_casesi)
10602 return 0;
10604 /* Convert the index to SImode. */
10605 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10607 enum machine_mode omode = TYPE_MODE (index_type);
10608 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10610 /* We must handle the endpoints in the original mode. */
10611 index_expr = build (MINUS_EXPR, index_type,
10612 index_expr, minval);
10613 minval = integer_zero_node;
10614 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10615 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10616 omode, 1, default_label);
10617 /* Now we can safely truncate. */
10618 index = convert_to_mode (index_mode, index, 0);
10620 else
10622 if (TYPE_MODE (index_type) != index_mode)
10624 index_expr = convert ((*lang_hooks.types.type_for_size)
10625 (index_bits, 0), index_expr);
10626 index_type = TREE_TYPE (index_expr);
10629 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10631 emit_queue ();
10632 index = protect_from_queue (index, 0);
10633 do_pending_stack_adjust ();
10635 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10636 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10637 (index, op_mode))
10638 index = copy_to_mode_reg (op_mode, index);
10640 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10642 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10643 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10644 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10645 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10646 (op1, op_mode))
10647 op1 = copy_to_mode_reg (op_mode, op1);
10649 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10651 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10652 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10653 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10654 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10655 (op2, op_mode))
10656 op2 = copy_to_mode_reg (op_mode, op2);
10658 emit_jump_insn (gen_casesi (index, op1, op2,
10659 table_label, default_label));
10660 return 1;
10663 /* Attempt to generate a tablejump instruction; same concept. */
10664 #ifndef HAVE_tablejump
10665 #define HAVE_tablejump 0
10666 #define gen_tablejump(x, y) (0)
10667 #endif
10669 /* Subroutine of the next function.
10671 INDEX is the value being switched on, with the lowest value
10672 in the table already subtracted.
10673 MODE is its expected mode (needed if INDEX is constant).
10674 RANGE is the length of the jump table.
10675 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10677 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10678 index value is out of range. */
10680 static void
10681 do_tablejump (index, mode, range, table_label, default_label)
10682 rtx index, range, table_label, default_label;
10683 enum machine_mode mode;
10685 rtx temp, vector;
10687 /* Do an unsigned comparison (in the proper mode) between the index
10688 expression and the value which represents the length of the range.
10689 Since we just finished subtracting the lower bound of the range
10690 from the index expression, this comparison allows us to simultaneously
10691 check that the original index expression value is both greater than
10692 or equal to the minimum value of the range and less than or equal to
10693 the maximum value of the range. */
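/* Illustrative example, not part of the original comment: for a switch whose
   case values span [low, high], INDEX arrives here already reduced by LOW,
   so the single unsigned test

       if ((unsigned) (index - low) > (unsigned) (high - low))
	 goto default_label;

   rejects both index < low (which wraps around to a large unsigned value)
   and index > high.  */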
10695 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10696 default_label);
10698 /* If index is in range, it must fit in Pmode.
10699 Convert to Pmode so we can index with it. */
10700 if (mode != Pmode)
10701 index = convert_to_mode (Pmode, index, 1);
10703 /* Don't let a MEM slip thru, because then INDEX that comes
10704 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10705 and break_out_memory_refs will go to work on it and mess it up. */
10706 #ifdef PIC_CASE_VECTOR_ADDRESS
10707 if (flag_pic && GET_CODE (index) != REG)
10708 index = copy_to_mode_reg (Pmode, index);
10709 #endif
10711 /* If flag_force_addr were to affect this address
10712 it could interfere with the tricky assumptions made
10713 about addresses that contain label-refs,
10714 which may be valid only very near the tablejump itself. */
10715 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10716 GET_MODE_SIZE, because this indicates how large insns are. The other
10717 uses should all be Pmode, because they are addresses. This code
10718 could fail if addresses and insns are not the same size. */
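/* Illustrative example, not part of the original comment: the address of the
   dispatch-table entry computed below is

       table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

   i.e. the label of the table plus the byte offset of entry INDEX.  */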
10719 index = gen_rtx_PLUS (Pmode,
10720 gen_rtx_MULT (Pmode, index,
10721 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10722 gen_rtx_LABEL_REF (Pmode, table_label));
10723 #ifdef PIC_CASE_VECTOR_ADDRESS
10724 if (flag_pic)
10725 index = PIC_CASE_VECTOR_ADDRESS (index);
10726 else
10727 #endif
10728 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10729 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10730 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10731 RTX_UNCHANGING_P (vector) = 1;
10732 convert_move (temp, vector, 0);
10734 emit_jump_insn (gen_tablejump (temp, table_label));
10736 /* If we are generating PIC code or if the table is PC-relative, the
10737 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10738 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10739 emit_barrier ();
10742 int
10743 try_tablejump (index_type, index_expr, minval, range,
10744 table_label, default_label)
10745 tree index_type, index_expr, minval, range;
10746 rtx table_label, default_label;
10748 rtx index;
10750 if (! HAVE_tablejump)
10751 return 0;
10753 index_expr = fold (build (MINUS_EXPR, index_type,
10754 convert (index_type, index_expr),
10755 convert (index_type, minval)));
10756 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10757 emit_queue ();
10758 index = protect_from_queue (index, 0);
10759 do_pending_stack_adjust ();
10761 do_tablejump (index, TYPE_MODE (index_type),
10762 convert_modes (TYPE_MODE (index_type),
10763 TYPE_MODE (TREE_TYPE (range)),
10764 expand_expr (range, NULL_RTX,
10765 VOIDmode, 0),
10766 TREE_UNSIGNED (TREE_TYPE (range))),
10767 table_label, default_label);
10768 return 1;
10771 #include "gt-expr.h"