1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
57 #ifdef PUSH_ROUNDING
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
65 #endif
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
120 struct store_by_pieces
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
129 void *constfundata;
130 int reverse;
133 static rtx enqueue_insn (rtx, rtx);
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
135 unsigned int);
136 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
137 struct move_by_pieces *);
138 static bool block_move_libcall_safe_for_call_parm (void);
139 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
140 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
141 static tree emit_block_move_libcall_fn (int);
142 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
143 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
144 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
145 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
146 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
147 struct store_by_pieces *);
148 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
149 static rtx clear_storage_via_libcall (rtx, rtx);
150 static tree clear_storage_libcall_fn (int);
151 static rtx compress_float_constant (rtx, rtx);
152 static rtx get_subtarget (rtx);
153 static int is_zeros_p (tree);
154 static int mostly_zeros_p (tree);
155 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
156 HOST_WIDE_INT, enum machine_mode,
157 tree, tree, int, int);
158 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
159 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int, tree, int);
161 static rtx var_rtx (tree);
163 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
164 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
166 static int is_aligning_offset (tree, tree);
167 static rtx expand_increment (tree, int, int);
168 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
169 #ifdef PUSH_ROUNDING
170 static void emit_single_push_insn (enum machine_mode, rtx, tree);
171 #endif
172 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
173 static rtx const_vector_from_tree (tree);
175 /* Record for each mode whether we can move a register directly to or
176 from an object of that mode in memory. If we can't, we won't try
177 to use that mode directly when accessing a field of that mode. */
179 static char direct_load[NUM_MACHINE_MODES];
180 static char direct_store[NUM_MACHINE_MODES];
182 /* Record for each mode whether we can float-extend from memory. */
184 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
186 /* If a memory-to-memory move would take MOVE_RATIO or more simple
187 move-instruction sequences, we will do a movstr or libcall instead. */
189 #ifndef MOVE_RATIO
190 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
191 #define MOVE_RATIO 2
192 #else
193 /* If we are optimizing for space (-Os), cut down the default move ratio. */
194 #define MOVE_RATIO (optimize_size ? 3 : 15)
195 #endif
196 #endif
198 /* This macro is used to determine whether move_by_pieces should be called
199 to perform a structure copy. */
200 #ifndef MOVE_BY_PIECES_P
201 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
202 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
203 #endif
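/* Illustrative sketch (not part of the original file): how a caller is
   expected to consult MOVE_BY_PIECES_P before expanding an inline copy.
   It mirrors the test made later in emit_block_move; the function name
   example_try_inline_copy is hypothetical.  */
#if 0
static int
example_try_inline_copy (rtx dst, rtx src, rtx size, unsigned int align)
{
  /* Only constant sizes can be copied piecewise; otherwise the caller
     falls back to a movstr pattern or a library call.  */
  if (GET_CODE (size) == CONST_INT
      && MOVE_BY_PIECES_P ((unsigned HOST_WIDE_INT) INTVAL (size), align))
    {
      move_by_pieces (dst, src, INTVAL (size), align, 0);
      return 1;
    }
  return 0;
}
#endif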
205 /* If a clear memory operation would take CLEAR_RATIO or more simple
206 move-instruction sequences, we will do a clrstr or libcall instead. */
208 #ifndef CLEAR_RATIO
209 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
210 #define CLEAR_RATIO 2
211 #else
212 /* If we are optimizing for space, cut down the default clear ratio. */
213 #define CLEAR_RATIO (optimize_size ? 3 : 15)
214 #endif
215 #endif
217 /* This macro is used to determine whether clear_by_pieces should be
218 called to clear storage. */
219 #ifndef CLEAR_BY_PIECES_P
220 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
221 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
222 #endif
224 /* This macro is used to determine whether store_by_pieces should be
225 called to "memset" storage with byte values other than zero, or
226 to "memcpy" storage when the source is a constant string. */
227 #ifndef STORE_BY_PIECES_P
228 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
229 #endif
231 /* This array records the insn_code of insns to perform block moves. */
232 enum insn_code movstr_optab[NUM_MACHINE_MODES];
234 /* This array records the insn_code of insns to perform block clears. */
235 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
237 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
239 #ifndef SLOW_UNALIGNED_ACCESS
240 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
241 #endif
243 /* This is run once per compilation to set up which modes can be used
244 directly in memory and to initialize the block move optab. */
246 void
247 init_expr_once (void)
249 rtx insn, pat;
250 enum machine_mode mode;
251 int num_clobbers;
252 rtx mem, mem1;
253 rtx reg;
255 /* Try indexing by frame ptr and try by stack ptr.
256 It is known that on the Convex the stack ptr isn't a valid index.
257 With luck, one or the other is valid on any machine. */
258 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
259 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
261 /* A scratch register we can modify in-place below to avoid
262 useless RTL allocations. */
263 reg = gen_rtx_REG (VOIDmode, -1);
265 insn = rtx_alloc (INSN);
266 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
267 PATTERN (insn) = pat;
269 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
270 mode = (enum machine_mode) ((int) mode + 1))
272 int regno;
274 direct_load[(int) mode] = direct_store[(int) mode] = 0;
275 PUT_MODE (mem, mode);
276 PUT_MODE (mem1, mode);
277 PUT_MODE (reg, mode);
279 /* See if there is some register that can be used in this mode and
280 directly loaded or stored from memory. */
282 if (mode != VOIDmode && mode != BLKmode)
283 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
284 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
285 regno++)
287 if (! HARD_REGNO_MODE_OK (regno, mode))
288 continue;
290 REGNO (reg) = regno;
292 SET_SRC (pat) = mem;
293 SET_DEST (pat) = reg;
294 if (recog (pat, insn, &num_clobbers) >= 0)
295 direct_load[(int) mode] = 1;
297 SET_SRC (pat) = mem1;
298 SET_DEST (pat) = reg;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_load[(int) mode] = 1;
302 SET_SRC (pat) = reg;
303 SET_DEST (pat) = mem;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_store[(int) mode] = 1;
307 SET_SRC (pat) = reg;
308 SET_DEST (pat) = mem1;
309 if (recog (pat, insn, &num_clobbers) >= 0)
310 direct_store[(int) mode] = 1;
314 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
316 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
317 mode = GET_MODE_WIDER_MODE (mode))
319 enum machine_mode srcmode;
320 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
321 srcmode = GET_MODE_WIDER_MODE (srcmode))
323 enum insn_code ic;
325 ic = can_extend_p (mode, srcmode, 0);
326 if (ic == CODE_FOR_nothing)
327 continue;
329 PUT_MODE (mem, srcmode);
331 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
332 float_extend_from_mem[mode][srcmode] = true;
337 /* This is run at the start of compiling a function. */
339 void
340 init_expr (void)
342 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
344 pending_chain = 0;
345 pending_stack_adjust = 0;
346 stack_pointer_delta = 0;
347 inhibit_defer_pop = 0;
348 saveregs_value = 0;
349 apply_args_value = 0;
350 forced_labels = 0;
353 /* Small sanity check that the queue is empty at the end of a function. */
355 void
356 finish_expr_for_function (void)
358 if (pending_chain)
359 abort ();
362 /* Manage the queue of increment instructions to be output
363 for POSTINCREMENT_EXPR expressions, etc. */
365 /* Queue up to increment (or change) VAR later. BODY says how:
366 BODY should be the same thing you would pass to emit_insn
367 to increment right away. It will go to emit_insn later on.
369 The value is a QUEUED expression to be used in place of VAR
370 where you want to guarantee the pre-incrementation value of VAR. */
372 static rtx
373 enqueue_insn (rtx var, rtx body)
375 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
376 body, pending_chain);
377 return pending_chain;
380 /* Use protect_from_queue to convert a QUEUED expression
381 into something that you can put immediately into an instruction.
382 If the queued incrementation has not happened yet,
383 protect_from_queue returns the variable itself.
384 If the incrementation has happened, protect_from_queue returns a temp
385 that contains a copy of the old value of the variable.
387 Any time an rtx which might possibly be a QUEUED is to be put
388 into an instruction, it must be passed through protect_from_queue first.
389 QUEUED expressions are not meaningful in instructions.
391 Do not pass a value through protect_from_queue and then hold
392 on to it for a while before putting it in an instruction!
393 If the queue is flushed in between, incorrect code will result. */
396 protect_from_queue (rtx x, int modify)
398 RTX_CODE code = GET_CODE (x);
400 #if 0 /* A QUEUED can hang around after the queue is forced out. */
401 /* Shortcut for most common case. */
402 if (pending_chain == 0)
403 return x;
404 #endif
406 if (code != QUEUED)
408 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
409 use of autoincrement. Make a copy of the contents of the memory
410 location rather than a copy of the address, but not if the value is
411 of mode BLKmode. Don't modify X in place since it might be
412 shared. */
413 if (code == MEM && GET_MODE (x) != BLKmode
414 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
416 rtx y = XEXP (x, 0);
417 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
419 if (QUEUED_INSN (y))
421 rtx temp = gen_reg_rtx (GET_MODE (x));
423 emit_insn_before (gen_move_insn (temp, new),
424 QUEUED_INSN (y));
425 return temp;
428 /* Copy the address into a pseudo, so that the returned value
429 remains correct across calls to emit_queue. */
430 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
433 /* Otherwise, recursively protect the subexpressions of all
434 the kinds of rtx's that can contain a QUEUED. */
435 if (code == MEM)
437 rtx tem = protect_from_queue (XEXP (x, 0), 0);
438 if (tem != XEXP (x, 0))
440 x = copy_rtx (x);
441 XEXP (x, 0) = tem;
444 else if (code == PLUS || code == MULT)
446 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
447 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
448 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
450 x = copy_rtx (x);
451 XEXP (x, 0) = new0;
452 XEXP (x, 1) = new1;
455 return x;
457 /* If the increment has not happened, use the variable itself. Copy it
458 into a new pseudo so that the value remains correct across calls to
459 emit_queue. */
460 if (QUEUED_INSN (x) == 0)
461 return copy_to_reg (QUEUED_VAR (x));
462 /* If the increment has happened and a pre-increment copy exists,
463 use that copy. */
464 if (QUEUED_COPY (x) != 0)
465 return QUEUED_COPY (x);
466 /* The increment has happened but we haven't set up a pre-increment copy.
467 Set one up now, and use it. */
468 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
469 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
470 QUEUED_INSN (x));
471 return QUEUED_COPY (x);
474 /* Return nonzero if X contains a QUEUED expression:
475 if it contains anything that will be altered by a queued increment.
476 We handle only combinations of MEM, PLUS, MINUS and MULT operators
477 since memory addresses generally contain only those. */
480 queued_subexp_p (rtx x)
482 enum rtx_code code = GET_CODE (x);
483 switch (code)
485 case QUEUED:
486 return 1;
487 case MEM:
488 return queued_subexp_p (XEXP (x, 0));
489 case MULT:
490 case PLUS:
491 case MINUS:
492 return (queued_subexp_p (XEXP (x, 0))
493 || queued_subexp_p (XEXP (x, 1)));
494 default:
495 return 0;
499 /* Perform all the pending incrementations. */
501 void
502 emit_queue (void)
504 rtx p;
505 while ((p = pending_chain))
507 rtx body = QUEUED_BODY (p);
509 switch (GET_CODE (body))
511 case INSN:
512 case JUMP_INSN:
513 case CALL_INSN:
514 case CODE_LABEL:
515 case BARRIER:
516 case NOTE:
517 QUEUED_INSN (p) = body;
518 emit_insn (body);
519 break;
521 #ifdef ENABLE_CHECKING
522 case SEQUENCE:
523 abort ();
524 break;
525 #endif
527 default:
528 QUEUED_INSN (p) = emit_insn (body);
529 break;
532 pending_chain = QUEUED_NEXT (p);
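/* Illustrative sketch (not part of the original file): the calling
   discipline described in the comments above.  An rtx that might be a
   QUEUED is passed through protect_from_queue immediately before being
   placed in an insn, and the pending increments are then flushed with
   emit_queue.  The function name example_use_then_flush is hypothetical.  */
#if 0
static void
example_use_then_flush (rtx dest, rtx maybe_queued)
{
  /* Obtain a value that is safe to put into an instruction right now;
     it must not be held across a later emit_queue.  */
  rtx safe = protect_from_queue (maybe_queued, 0);
  emit_move_insn (dest, safe);

  /* Perform all increments queued by enqueue_insn.  */
  emit_queue ();
}
#endif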
536 /* Copy data from FROM to TO, where the machine modes are not the same.
537 Both modes may be integer, or both may be floating.
538 UNSIGNEDP should be nonzero if FROM is an unsigned type.
539 This causes zero-extension instead of sign-extension. */
541 void
542 convert_move (rtx to, rtx from, int unsignedp)
544 enum machine_mode to_mode = GET_MODE (to);
545 enum machine_mode from_mode = GET_MODE (from);
546 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
547 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
548 enum insn_code code;
549 rtx libcall;
551 /* rtx code for making an equivalent value. */
552 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
553 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
555 to = protect_from_queue (to, 1);
556 from = protect_from_queue (from, 0);
558 if (to_real != from_real)
559 abort ();
561 /* If FROM is a SUBREG that indicates that we have already done at least
562 the required extension, strip it. We don't handle such SUBREGs as
563 TO here. */
565 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
566 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
567 >= GET_MODE_SIZE (to_mode))
568 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
569 from = gen_lowpart (to_mode, from), from_mode = to_mode;
571 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
572 abort ();
574 if (to_mode == from_mode
575 || (from_mode == VOIDmode && CONSTANT_P (from)))
577 emit_move_insn (to, from);
578 return;
581 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
583 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
584 abort ();
586 if (VECTOR_MODE_P (to_mode))
587 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
588 else
589 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
591 emit_move_insn (to, from);
592 return;
595 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
597 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
598 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
599 return;
602 if (to_real != from_real)
603 abort ();
605 if (to_real)
607 rtx value, insns;
609 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
611 /* Try converting directly if the insn is supported. */
612 if ((code = can_extend_p (to_mode, from_mode, 0))
613 != CODE_FOR_nothing)
615 emit_unop_insn (code, to, from, UNKNOWN);
616 return;
620 #ifdef HAVE_trunchfqf2
621 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
623 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
624 return;
626 #endif
627 #ifdef HAVE_trunctqfqf2
628 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
630 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
631 return;
633 #endif
634 #ifdef HAVE_truncsfqf2
635 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
637 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
638 return;
640 #endif
641 #ifdef HAVE_truncdfqf2
642 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
644 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
645 return;
647 #endif
648 #ifdef HAVE_truncxfqf2
649 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
651 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
652 return;
654 #endif
655 #ifdef HAVE_trunctfqf2
656 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
658 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
659 return;
661 #endif
663 #ifdef HAVE_trunctqfhf2
664 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
666 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
667 return;
669 #endif
670 #ifdef HAVE_truncsfhf2
671 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
673 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
674 return;
676 #endif
677 #ifdef HAVE_truncdfhf2
678 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
680 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
681 return;
683 #endif
684 #ifdef HAVE_truncxfhf2
685 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
687 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
688 return;
690 #endif
691 #ifdef HAVE_trunctfhf2
692 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
694 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
695 return;
697 #endif
699 #ifdef HAVE_truncsftqf2
700 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
702 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
703 return;
705 #endif
706 #ifdef HAVE_truncdftqf2
707 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
709 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
710 return;
712 #endif
713 #ifdef HAVE_truncxftqf2
714 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
716 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
717 return;
719 #endif
720 #ifdef HAVE_trunctftqf2
721 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
723 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
724 return;
726 #endif
728 #ifdef HAVE_truncdfsf2
729 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
731 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
732 return;
734 #endif
735 #ifdef HAVE_truncxfsf2
736 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
738 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
739 return;
741 #endif
742 #ifdef HAVE_trunctfsf2
743 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
745 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
746 return;
748 #endif
749 #ifdef HAVE_truncxfdf2
750 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
752 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
753 return;
755 #endif
756 #ifdef HAVE_trunctfdf2
757 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
759 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
760 return;
762 #endif
764 libcall = (rtx) 0;
765 switch (from_mode)
767 case SFmode:
768 switch (to_mode)
770 case DFmode:
771 libcall = extendsfdf2_libfunc;
772 break;
774 case XFmode:
775 libcall = extendsfxf2_libfunc;
776 break;
778 case TFmode:
779 libcall = extendsftf2_libfunc;
780 break;
782 default:
783 break;
785 break;
787 case DFmode:
788 switch (to_mode)
790 case SFmode:
791 libcall = truncdfsf2_libfunc;
792 break;
794 case XFmode:
795 libcall = extenddfxf2_libfunc;
796 break;
798 case TFmode:
799 libcall = extenddftf2_libfunc;
800 break;
802 default:
803 break;
805 break;
807 case XFmode:
808 switch (to_mode)
810 case SFmode:
811 libcall = truncxfsf2_libfunc;
812 break;
814 case DFmode:
815 libcall = truncxfdf2_libfunc;
816 break;
818 default:
819 break;
821 break;
823 case TFmode:
824 switch (to_mode)
826 case SFmode:
827 libcall = trunctfsf2_libfunc;
828 break;
830 case DFmode:
831 libcall = trunctfdf2_libfunc;
832 break;
834 default:
835 break;
837 break;
839 default:
840 break;
843 if (libcall == (rtx) 0)
844 /* This conversion is not implemented yet. */
845 abort ();
847 start_sequence ();
848 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
849 1, from, from_mode);
850 insns = get_insns ();
851 end_sequence ();
852 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
853 from));
854 return;
857 /* Now both modes are integers. */
859 /* Handle expanding beyond a word. */
860 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
861 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
863 rtx insns;
864 rtx lowpart;
865 rtx fill_value;
866 rtx lowfrom;
867 int i;
868 enum machine_mode lowpart_mode;
869 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
871 /* Try converting directly if the insn is supported. */
872 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
873 != CODE_FOR_nothing)
875 /* If FROM is a SUBREG, put it into a register. Do this
876 so that we always generate the same set of insns for
877 better cse'ing; if an intermediate assignment occurred,
878 we won't be doing the operation directly on the SUBREG. */
879 if (optimize > 0 && GET_CODE (from) == SUBREG)
880 from = force_reg (from_mode, from);
881 emit_unop_insn (code, to, from, equiv_code);
882 return;
884 /* Next, try converting via full word. */
885 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
886 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
887 != CODE_FOR_nothing))
889 if (GET_CODE (to) == REG)
890 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
891 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
892 emit_unop_insn (code, to,
893 gen_lowpart (word_mode, to), equiv_code);
894 return;
897 /* No special multiword conversion insn; do it by hand. */
898 start_sequence ();
900 /* Since we will turn this into a no conflict block, we must ensure
901 that the source does not overlap the target. */
903 if (reg_overlap_mentioned_p (to, from))
904 from = force_reg (from_mode, from);
906 /* Get a copy of FROM widened to a word, if necessary. */
907 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
908 lowpart_mode = word_mode;
909 else
910 lowpart_mode = from_mode;
912 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
914 lowpart = gen_lowpart (lowpart_mode, to);
915 emit_move_insn (lowpart, lowfrom);
917 /* Compute the value to put in each remaining word. */
918 if (unsignedp)
919 fill_value = const0_rtx;
920 else
922 #ifdef HAVE_slt
923 if (HAVE_slt
924 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
925 && STORE_FLAG_VALUE == -1)
927 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
928 lowpart_mode, 0);
929 fill_value = gen_reg_rtx (word_mode);
930 emit_insn (gen_slt (fill_value));
932 else
933 #endif
935 fill_value
936 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
937 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
938 NULL_RTX, 0);
939 fill_value = convert_to_mode (word_mode, fill_value, 1);
943 /* Fill the remaining words. */
944 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
946 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
947 rtx subword = operand_subword (to, index, 1, to_mode);
949 if (subword == 0)
950 abort ();
952 if (fill_value != subword)
953 emit_move_insn (subword, fill_value);
956 insns = get_insns ();
957 end_sequence ();
959 emit_no_conflict_block (insns, to, from, NULL_RTX,
960 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
961 return;
964 /* Truncating multi-word to a word or less. */
965 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
966 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
968 if (!((GET_CODE (from) == MEM
969 && ! MEM_VOLATILE_P (from)
970 && direct_load[(int) to_mode]
971 && ! mode_dependent_address_p (XEXP (from, 0)))
972 || GET_CODE (from) == REG
973 || GET_CODE (from) == SUBREG))
974 from = force_reg (from_mode, from);
975 convert_move (to, gen_lowpart (word_mode, from), 0);
976 return;
979 /* Handle pointer conversion. */ /* SPEE 900220. */
980 if (to_mode == PQImode)
982 if (from_mode != QImode)
983 from = convert_to_mode (QImode, from, unsignedp);
985 #ifdef HAVE_truncqipqi2
986 if (HAVE_truncqipqi2)
988 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
989 return;
991 #endif /* HAVE_truncqipqi2 */
992 abort ();
995 if (from_mode == PQImode)
997 if (to_mode != QImode)
999 from = convert_to_mode (QImode, from, unsignedp);
1000 from_mode = QImode;
1002 else
1004 #ifdef HAVE_extendpqiqi2
1005 if (HAVE_extendpqiqi2)
1007 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1008 return;
1010 #endif /* HAVE_extendpqiqi2 */
1011 abort ();
1015 if (to_mode == PSImode)
1017 if (from_mode != SImode)
1018 from = convert_to_mode (SImode, from, unsignedp);
1020 #ifdef HAVE_truncsipsi2
1021 if (HAVE_truncsipsi2)
1023 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1024 return;
1026 #endif /* HAVE_truncsipsi2 */
1027 abort ();
1030 if (from_mode == PSImode)
1032 if (to_mode != SImode)
1034 from = convert_to_mode (SImode, from, unsignedp);
1035 from_mode = SImode;
1037 else
1039 #ifdef HAVE_extendpsisi2
1040 if (! unsignedp && HAVE_extendpsisi2)
1042 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1043 return;
1045 #endif /* HAVE_extendpsisi2 */
1046 #ifdef HAVE_zero_extendpsisi2
1047 if (unsignedp && HAVE_zero_extendpsisi2)
1049 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1050 return;
1052 #endif /* HAVE_zero_extendpsisi2 */
1053 abort ();
1057 if (to_mode == PDImode)
1059 if (from_mode != DImode)
1060 from = convert_to_mode (DImode, from, unsignedp);
1062 #ifdef HAVE_truncdipdi2
1063 if (HAVE_truncdipdi2)
1065 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1066 return;
1068 #endif /* HAVE_truncdipdi2 */
1069 abort ();
1072 if (from_mode == PDImode)
1074 if (to_mode != DImode)
1076 from = convert_to_mode (DImode, from, unsignedp);
1077 from_mode = DImode;
1079 else
1081 #ifdef HAVE_extendpdidi2
1082 if (HAVE_extendpdidi2)
1084 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1085 return;
1087 #endif /* HAVE_extendpdidi2 */
1088 abort ();
1092 /* Now follow all the conversions between integers
1093 no more than a word long. */
1095 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1096 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1097 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1098 GET_MODE_BITSIZE (from_mode)))
1100 if (!((GET_CODE (from) == MEM
1101 && ! MEM_VOLATILE_P (from)
1102 && direct_load[(int) to_mode]
1103 && ! mode_dependent_address_p (XEXP (from, 0)))
1104 || GET_CODE (from) == REG
1105 || GET_CODE (from) == SUBREG))
1106 from = force_reg (from_mode, from);
1107 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1108 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1109 from = copy_to_reg (from);
1110 emit_move_insn (to, gen_lowpart (to_mode, from));
1111 return;
1114 /* Handle extension. */
1115 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1117 /* Convert directly if that works. */
1118 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1119 != CODE_FOR_nothing)
1121 if (flag_force_mem)
1122 from = force_not_mem (from);
1124 emit_unop_insn (code, to, from, equiv_code);
1125 return;
1127 else
1129 enum machine_mode intermediate;
1130 rtx tmp;
1131 tree shift_amount;
1133 /* Search for a mode to convert via. */
1134 for (intermediate = from_mode; intermediate != VOIDmode;
1135 intermediate = GET_MODE_WIDER_MODE (intermediate))
1136 if (((can_extend_p (to_mode, intermediate, unsignedp)
1137 != CODE_FOR_nothing)
1138 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1139 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1140 GET_MODE_BITSIZE (intermediate))))
1141 && (can_extend_p (intermediate, from_mode, unsignedp)
1142 != CODE_FOR_nothing))
1144 convert_move (to, convert_to_mode (intermediate, from,
1145 unsignedp), unsignedp);
1146 return;
1149 /* No suitable intermediate mode.
1150 Generate what we need with shifts. */
1151 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1152 - GET_MODE_BITSIZE (from_mode), 0);
1153 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1154 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1155 to, unsignedp);
1156 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1157 to, unsignedp);
1158 if (tmp != to)
1159 emit_move_insn (to, tmp);
1160 return;
1164 /* Support special truncate insns for certain modes. */
1166 if (from_mode == DImode && to_mode == SImode)
1168 #ifdef HAVE_truncdisi2
1169 if (HAVE_truncdisi2)
1171 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1172 return;
1174 #endif
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1176 return;
1179 if (from_mode == DImode && to_mode == HImode)
1181 #ifdef HAVE_truncdihi2
1182 if (HAVE_truncdihi2)
1184 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1185 return;
1187 #endif
1188 convert_move (to, force_reg (from_mode, from), unsignedp);
1189 return;
1192 if (from_mode == DImode && to_mode == QImode)
1194 #ifdef HAVE_truncdiqi2
1195 if (HAVE_truncdiqi2)
1197 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1198 return;
1200 #endif
1201 convert_move (to, force_reg (from_mode, from), unsignedp);
1202 return;
1205 if (from_mode == SImode && to_mode == HImode)
1207 #ifdef HAVE_truncsihi2
1208 if (HAVE_truncsihi2)
1210 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1211 return;
1213 #endif
1214 convert_move (to, force_reg (from_mode, from), unsignedp);
1215 return;
1218 if (from_mode == SImode && to_mode == QImode)
1220 #ifdef HAVE_truncsiqi2
1221 if (HAVE_truncsiqi2)
1223 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1224 return;
1226 #endif
1227 convert_move (to, force_reg (from_mode, from), unsignedp);
1228 return;
1231 if (from_mode == HImode && to_mode == QImode)
1233 #ifdef HAVE_trunchiqi2
1234 if (HAVE_trunchiqi2)
1236 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1237 return;
1239 #endif
1240 convert_move (to, force_reg (from_mode, from), unsignedp);
1241 return;
1244 if (from_mode == TImode && to_mode == DImode)
1246 #ifdef HAVE_trunctidi2
1247 if (HAVE_trunctidi2)
1249 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1250 return;
1252 #endif
1253 convert_move (to, force_reg (from_mode, from), unsignedp);
1254 return;
1257 if (from_mode == TImode && to_mode == SImode)
1259 #ifdef HAVE_trunctisi2
1260 if (HAVE_trunctisi2)
1262 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1263 return;
1265 #endif
1266 convert_move (to, force_reg (from_mode, from), unsignedp);
1267 return;
1270 if (from_mode == TImode && to_mode == HImode)
1272 #ifdef HAVE_trunctihi2
1273 if (HAVE_trunctihi2)
1275 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1276 return;
1278 #endif
1279 convert_move (to, force_reg (from_mode, from), unsignedp);
1280 return;
1283 if (from_mode == TImode && to_mode == QImode)
1285 #ifdef HAVE_trunctiqi2
1286 if (HAVE_trunctiqi2)
1288 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1289 return;
1291 #endif
1292 convert_move (to, force_reg (from_mode, from), unsignedp);
1293 return;
1296 /* Handle truncation of volatile memrefs, and so on;
1297 the things that couldn't be truncated directly,
1298 and for which there was no special instruction. */
1299 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1301 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1302 emit_move_insn (to, temp);
1303 return;
1306 /* Mode combination is not recognized. */
1307 abort ();
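/* Illustrative sketch (not part of the original file): a minimal caller of
   convert_move, zero-extending a QImode value into a fresh SImode register.
   The function name example_zero_extend_qi_to_si is hypothetical.  */
#if 0
static rtx
example_zero_extend_qi_to_si (rtx src_qi)
{
  rtx dst = gen_reg_rtx (SImode);
  /* UNSIGNEDP == 1 requests zero-extension rather than sign-extension.  */
  convert_move (dst, src_qi, 1);
  return dst;
}
#endif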
1310 /* Return an rtx for a value that would result
1311 from converting X to mode MODE.
1312 Both X and MODE may be floating, or both integer.
1313 UNSIGNEDP is nonzero if X is an unsigned value.
1314 This can be done by referring to a part of X in place
1315 or by copying to a new temporary with conversion.
1317 This function *must not* call protect_from_queue
1318 except when putting X into an insn (in which case convert_move does it). */
1321 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
1323 return convert_modes (mode, VOIDmode, x, unsignedp);
1326 /* Return an rtx for a value that would result
1327 from converting X from mode OLDMODE to mode MODE.
1328 Both modes may be floating, or both integer.
1329 UNSIGNEDP is nonzero if X is an unsigned value.
1331 This can be done by referring to a part of X in place
1332 or by copying to a new temporary with conversion.
1334 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1336 This function *must not* call protect_from_queue
1337 except when putting X into an insn (in which case convert_move does it). */
1340 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
1342 rtx temp;
1344 /* If FROM is a SUBREG that indicates that we have already done at least
1345 the required extension, strip it. */
1347 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1348 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1349 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1350 x = gen_lowpart (mode, x);
1352 if (GET_MODE (x) != VOIDmode)
1353 oldmode = GET_MODE (x);
1355 if (mode == oldmode)
1356 return x;
1358 /* There is one case that we must handle specially: If we are converting
1359 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1360 we are to interpret the constant as unsigned, gen_lowpart will do
1361 the wrong if the constant appears negative. What we want to do is
1362 make the high-order word of the constant zero, not all ones. */
1364 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1365 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1366 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1368 HOST_WIDE_INT val = INTVAL (x);
1370 if (oldmode != VOIDmode
1371 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1373 int width = GET_MODE_BITSIZE (oldmode);
1375 /* We need to zero extend VAL. */
1376 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1379 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1382 /* We can do this with a gen_lowpart if both desired and current modes
1383 are integer, and this is either a constant integer, a register, or a
1384 non-volatile MEM. Except for the constant case where MODE is no
1385 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1387 if ((GET_CODE (x) == CONST_INT
1388 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1389 || (GET_MODE_CLASS (mode) == MODE_INT
1390 && GET_MODE_CLASS (oldmode) == MODE_INT
1391 && (GET_CODE (x) == CONST_DOUBLE
1392 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1393 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1394 && direct_load[(int) mode])
1395 || (GET_CODE (x) == REG
1396 && (! HARD_REGISTER_P (x)
1397 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1398 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1399 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1401 /* ?? If we don't know OLDMODE, we have to assume here that
1402 X does not need sign- or zero-extension. This may not be
1403 the case, but it's the best we can do. */
1404 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1405 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1407 HOST_WIDE_INT val = INTVAL (x);
1408 int width = GET_MODE_BITSIZE (oldmode);
1410 /* We must sign or zero-extend in this case. Start by
1411 zero-extending, then sign extend if we need to. */
1412 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1413 if (! unsignedp
1414 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1415 val |= (HOST_WIDE_INT) (-1) << width;
1417 return gen_int_mode (val, mode);
1420 return gen_lowpart (mode, x);
1423 temp = gen_reg_rtx (mode);
1424 convert_move (temp, x, unsignedp);
1425 return temp;
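/* Illustrative sketch (not part of the original file): converting a constant
   with convert_modes.  OLDMODE is supplied explicitly because a CONST_INT
   carries no mode of its own.  Hypothetical example only.  */
#if 0
static rtx
example_const_to_simode (HOST_WIDE_INT value)
{
  rtx x = GEN_INT (value);
  /* Interpret VALUE as an unsigned QImode quantity and widen it to SImode.  */
  return convert_modes (SImode, QImode, x, 1);
}
#endif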
1428 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1429 store efficiently. Due to internal GCC limitations, this is
1430 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1431 for an immediate constant. */
1433 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1435 /* Determine whether the LEN bytes can be moved by using several move
1436 instructions. Return nonzero if a call to move_by_pieces should
1437 succeed. */
1440 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1441 unsigned int align ATTRIBUTE_UNUSED)
1443 return MOVE_BY_PIECES_P (len, align);
1446 /* Generate several move instructions to copy LEN bytes from block FROM to
1447 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1448 and TO through protect_from_queue before calling.
1450 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1451 used to push FROM to the stack.
1453 ALIGN is maximum stack alignment we can assume.
1455 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1456 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1457 stpcpy. */
1460 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1461 unsigned int align, int endp)
1463 struct move_by_pieces data;
1464 rtx to_addr, from_addr = XEXP (from, 0);
1465 unsigned int max_size = MOVE_MAX_PIECES + 1;
1466 enum machine_mode mode = VOIDmode, tmode;
1467 enum insn_code icode;
1469 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1471 data.offset = 0;
1472 data.from_addr = from_addr;
1473 if (to)
1475 to_addr = XEXP (to, 0);
1476 data.to = to;
1477 data.autinc_to
1478 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1479 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1480 data.reverse
1481 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1483 else
1485 to_addr = NULL_RTX;
1486 data.to = NULL_RTX;
1487 data.autinc_to = 1;
1488 #ifdef STACK_GROWS_DOWNWARD
1489 data.reverse = 1;
1490 #else
1491 data.reverse = 0;
1492 #endif
1494 data.to_addr = to_addr;
1495 data.from = from;
1496 data.autinc_from
1497 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1498 || GET_CODE (from_addr) == POST_INC
1499 || GET_CODE (from_addr) == POST_DEC);
1501 data.explicit_inc_from = 0;
1502 data.explicit_inc_to = 0;
1503 if (data.reverse) data.offset = len;
1504 data.len = len;
1506 /* If copying requires more than two move insns,
1507 copy addresses to registers (to make displacements shorter)
1508 and use post-increment if available. */
1509 if (!(data.autinc_from && data.autinc_to)
1510 && move_by_pieces_ninsns (len, align) > 2)
1512 /* Find the mode of the largest move... */
1513 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1514 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1515 if (GET_MODE_SIZE (tmode) < max_size)
1516 mode = tmode;
1518 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1520 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1521 data.autinc_from = 1;
1522 data.explicit_inc_from = -1;
1524 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1526 data.from_addr = copy_addr_to_reg (from_addr);
1527 data.autinc_from = 1;
1528 data.explicit_inc_from = 1;
1530 if (!data.autinc_from && CONSTANT_P (from_addr))
1531 data.from_addr = copy_addr_to_reg (from_addr);
1532 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1534 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1535 data.autinc_to = 1;
1536 data.explicit_inc_to = -1;
1538 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1540 data.to_addr = copy_addr_to_reg (to_addr);
1541 data.autinc_to = 1;
1542 data.explicit_inc_to = 1;
1544 if (!data.autinc_to && CONSTANT_P (to_addr))
1545 data.to_addr = copy_addr_to_reg (to_addr);
1548 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1549 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1550 align = MOVE_MAX * BITS_PER_UNIT;
1552 /* First move what we can in the largest integer mode, then go to
1553 successively smaller modes. */
1555 while (max_size > 1)
1557 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1558 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1559 if (GET_MODE_SIZE (tmode) < max_size)
1560 mode = tmode;
1562 if (mode == VOIDmode)
1563 break;
1565 icode = mov_optab->handlers[(int) mode].insn_code;
1566 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1567 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1569 max_size = GET_MODE_SIZE (mode);
1572 /* The code above should have handled everything. */
1573 if (data.len > 0)
1574 abort ();
1576 if (endp)
1578 rtx to1;
1580 if (data.reverse)
1581 abort ();
1582 if (data.autinc_to)
1584 if (endp == 2)
1586 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1587 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1588 else
1589 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1590 -1));
1592 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1593 data.offset);
1595 else
1597 if (endp == 2)
1598 --data.offset;
1599 to1 = adjust_address (data.to, QImode, data.offset);
1601 return to1;
1603 else
1604 return data.to;
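/* Illustrative sketch (not part of the original file): using
   can_move_by_pieces as a guard and asking move_by_pieces for the end
   address, mempcpy-style (ENDP == 1).  Hypothetical example.  */
#if 0
static rtx
example_inline_mempcpy (rtx dst, rtx src, unsigned HOST_WIDE_INT len,
			unsigned int align)
{
  if (! can_move_by_pieces (len, align))
    return NULL_RTX;
  /* With ENDP == 1 the returned MEM addresses the first byte past the
     copied block, as mempcpy would return.  */
  return move_by_pieces (dst, src, len, align, 1);
}
#endif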
1607 /* Return number of insns required to move L bytes by pieces.
1608 ALIGN (in bits) is maximum alignment we can assume. */
1610 static unsigned HOST_WIDE_INT
1611 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1613 unsigned HOST_WIDE_INT n_insns = 0;
1614 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1616 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1617 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1618 align = MOVE_MAX * BITS_PER_UNIT;
1620 while (max_size > 1)
1622 enum machine_mode mode = VOIDmode, tmode;
1623 enum insn_code icode;
1625 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1626 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1627 if (GET_MODE_SIZE (tmode) < max_size)
1628 mode = tmode;
1630 if (mode == VOIDmode)
1631 break;
1633 icode = mov_optab->handlers[(int) mode].insn_code;
1634 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1635 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1637 max_size = GET_MODE_SIZE (mode);
1640 if (l)
1641 abort ();
1642 return n_insns;
1645 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1646 with move instructions for mode MODE. GENFUN is the gen_... function
1647 to make a move insn for that mode. DATA has all the other info. */
1649 static void
1650 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1651 struct move_by_pieces *data)
1653 unsigned int size = GET_MODE_SIZE (mode);
1654 rtx to1 = NULL_RTX, from1;
1656 while (data->len >= size)
1658 if (data->reverse)
1659 data->offset -= size;
1661 if (data->to)
1663 if (data->autinc_to)
1664 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1665 data->offset);
1666 else
1667 to1 = adjust_address (data->to, mode, data->offset);
1670 if (data->autinc_from)
1671 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1672 data->offset);
1673 else
1674 from1 = adjust_address (data->from, mode, data->offset);
1676 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1677 emit_insn (gen_add2_insn (data->to_addr,
1678 GEN_INT (-(HOST_WIDE_INT)size)));
1679 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1680 emit_insn (gen_add2_insn (data->from_addr,
1681 GEN_INT (-(HOST_WIDE_INT)size)));
1683 if (data->to)
1684 emit_insn ((*genfun) (to1, from1));
1685 else
1687 #ifdef PUSH_ROUNDING
1688 emit_single_push_insn (mode, from1, NULL);
1689 #else
1690 abort ();
1691 #endif
1694 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1695 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1696 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1697 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1699 if (! data->reverse)
1700 data->offset += size;
1702 data->len -= size;
1706 /* Emit code to move a block Y to a block X. This may be done with
1707 string-move instructions, with multiple scalar move instructions,
1708 or with a library call.
1710 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1711 SIZE is an rtx that says how long they are.
1712 ALIGN is the maximum alignment we can assume they have.
1713 METHOD describes what kind of copy this is, and what mechanisms may be used.
1715 Return the address of the new block, if memcpy is called and returns it,
1716 0 otherwise. */
1719 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1721 bool may_use_call;
1722 rtx retval = 0;
1723 unsigned int align;
1725 switch (method)
1727 case BLOCK_OP_NORMAL:
1728 may_use_call = true;
1729 break;
1731 case BLOCK_OP_CALL_PARM:
1732 may_use_call = block_move_libcall_safe_for_call_parm ();
1734 /* Make inhibit_defer_pop nonzero around the library call
1735 to force it to pop the arguments right away. */
1736 NO_DEFER_POP;
1737 break;
1739 case BLOCK_OP_NO_LIBCALL:
1740 may_use_call = false;
1741 break;
1743 default:
1744 abort ();
1747 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1749 if (GET_MODE (x) != BLKmode)
1750 abort ();
1751 if (GET_MODE (y) != BLKmode)
1752 abort ();
1754 x = protect_from_queue (x, 1);
1755 y = protect_from_queue (y, 0);
1756 size = protect_from_queue (size, 0);
1758 if (GET_CODE (x) != MEM)
1759 abort ();
1760 if (GET_CODE (y) != MEM)
1761 abort ();
1762 if (size == 0)
1763 abort ();
1765 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1766 can be incorrect is coming from __builtin_memcpy. */
1767 if (GET_CODE (size) == CONST_INT)
1769 x = shallow_copy_rtx (x);
1770 y = shallow_copy_rtx (y);
1771 set_mem_size (x, size);
1772 set_mem_size (y, size);
1775 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1776 move_by_pieces (x, y, INTVAL (size), align, 0);
1777 else if (emit_block_move_via_movstr (x, y, size, align))
1779 else if (may_use_call)
1780 retval = emit_block_move_via_libcall (x, y, size);
1781 else
1782 emit_block_move_via_loop (x, y, size, align);
1784 if (method == BLOCK_OP_CALL_PARM)
1785 OK_DEFER_POP;
1787 return retval;
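/* Illustrative sketch (not part of the original file): a typical caller of
   emit_block_move.  Both operands are BLKmode MEMs and SIZE is a CONST_INT;
   BLOCK_OP_NORMAL permits any strategy, including a libcall.  The function
   name example_copy_block is hypothetical.  */
#if 0
static void
example_copy_block (rtx dst_blk, rtx src_blk, HOST_WIDE_INT nbytes)
{
  /* The return value is memcpy's result when a libcall was emitted and
     0 otherwise; this caller does not need it.  */
  emit_block_move (dst_blk, src_blk, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif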
1790 /* A subroutine of emit_block_move. Returns true if calling the
1791 block move libcall will not clobber any parameters which may have
1792 already been placed on the stack. */
1794 static bool
1795 block_move_libcall_safe_for_call_parm (void)
1797 if (PUSH_ARGS)
1798 return true;
1799 else
1801 /* Check to see whether memcpy takes all register arguments. */
1802 static enum {
1803 takes_regs_uninit, takes_regs_no, takes_regs_yes
1804 } takes_regs = takes_regs_uninit;
1806 switch (takes_regs)
1808 case takes_regs_uninit:
1810 CUMULATIVE_ARGS args_so_far;
1811 tree fn, arg;
1813 fn = emit_block_move_libcall_fn (false);
1814 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1816 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1817 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1819 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1820 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1821 if (!tmp || !REG_P (tmp))
1822 goto fail_takes_regs;
1823 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1824 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1825 NULL_TREE, 1))
1826 goto fail_takes_regs;
1827 #endif
1828 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1831 takes_regs = takes_regs_yes;
1832 /* FALLTHRU */
1834 case takes_regs_yes:
1835 return true;
1837 fail_takes_regs:
1838 takes_regs = takes_regs_no;
1839 /* FALLTHRU */
1840 case takes_regs_no:
1841 return false;
1843 default:
1844 abort ();
1849 /* A subroutine of emit_block_move. Expand a movstr pattern;
1850 return true if successful. */
1852 static bool
1853 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1855 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1856 enum machine_mode mode;
1858 /* Since this is a move insn, we don't care about volatility. */
1859 volatile_ok = 1;
1861 /* Try the most limited insn first, because there's no point
1862 including more than one in the machine description unless
1863 the more limited one has some advantage. */
1865 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1866 mode = GET_MODE_WIDER_MODE (mode))
1868 enum insn_code code = movstr_optab[(int) mode];
1869 insn_operand_predicate_fn pred;
1871 if (code != CODE_FOR_nothing
1872 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1873 here because if SIZE is less than the mode mask, as it is
1874 returned by the macro, it will definitely be less than the
1875 actual mode mask. */
1876 && ((GET_CODE (size) == CONST_INT
1877 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1878 <= (GET_MODE_MASK (mode) >> 1)))
1879 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1880 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1881 || (*pred) (x, BLKmode))
1882 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1883 || (*pred) (y, BLKmode))
1884 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1885 || (*pred) (opalign, VOIDmode)))
1887 rtx op2;
1888 rtx last = get_last_insn ();
1889 rtx pat;
1891 op2 = convert_to_mode (mode, size, 1);
1892 pred = insn_data[(int) code].operand[2].predicate;
1893 if (pred != 0 && ! (*pred) (op2, mode))
1894 op2 = copy_to_mode_reg (mode, op2);
1896 /* ??? When called via emit_block_move_for_call, it'd be
1897 nice if there were some way to inform the backend, so
1898 that it doesn't fail the expansion because it thinks
1899 emitting the libcall would be more efficient. */
1901 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1902 if (pat)
1904 emit_insn (pat);
1905 volatile_ok = 0;
1906 return true;
1908 else
1909 delete_insns_since (last);
1913 volatile_ok = 0;
1914 return false;
1917 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1918 Return the return value from memcpy, 0 otherwise. */
1920 static rtx
1921 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1923 rtx dst_addr, src_addr;
1924 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1925 enum machine_mode size_mode;
1926 rtx retval;
1928 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1930 It is unsafe to save the value generated by protect_from_queue and reuse
1931 it later. Consider what happens if emit_queue is called before the
1932 return value from protect_from_queue is used.
1934 Expansion of the CALL_EXPR below will call emit_queue before we are
1935 finished emitting RTL for argument setup. So if we are not careful we
1936 could get the wrong value for an argument.
1938 To avoid this problem we go ahead and emit code to copy the addresses of
1939 DST and SRC and SIZE into new pseudos. We can then place those new
1940 pseudos into an RTL_EXPR and use them later, even after a call to
1941 emit_queue.
1943 Note this is not strictly needed for library calls since they do not call
1944 emit_queue before loading their arguments. However, we may need to have
1945 library calls call emit_queue in the future since failing to do so could
1946 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1947 arguments in registers. */
1949 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1950 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1952 #ifdef POINTERS_EXTEND_UNSIGNED
1953 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1954 src_addr = convert_memory_address (ptr_mode, src_addr);
1955 #endif
1957 dst_tree = make_tree (ptr_type_node, dst_addr);
1958 src_tree = make_tree (ptr_type_node, src_addr);
1960 if (TARGET_MEM_FUNCTIONS)
1961 size_mode = TYPE_MODE (sizetype);
1962 else
1963 size_mode = TYPE_MODE (unsigned_type_node);
1965 size = convert_to_mode (size_mode, size, 1);
1966 size = copy_to_mode_reg (size_mode, size);
1968 /* It is incorrect to use the libcall calling conventions to call
1969 memcpy in this context. This could be a user call to memcpy and
1970 the user may wish to examine the return value from memcpy. For
1971 targets where libcalls and normal calls have different conventions
1972 for returning pointers, we could end up generating incorrect code.
1974 For convenience, we generate the call to bcopy this way as well. */
1976 if (TARGET_MEM_FUNCTIONS)
1977 size_tree = make_tree (sizetype, size);
1978 else
1979 size_tree = make_tree (unsigned_type_node, size);
1981 fn = emit_block_move_libcall_fn (true);
1982 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1983 if (TARGET_MEM_FUNCTIONS)
1985 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1986 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1988 else
1990 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1991 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1994 /* Now we have to build up the CALL_EXPR itself. */
1995 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1996 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1997 call_expr, arg_list, NULL_TREE);
1998 TREE_SIDE_EFFECTS (call_expr) = 1;
2000 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2002 /* If we are initializing a readonly value, show the above call clobbered
2003 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
2004 the delay slot scheduler might overlook conflicts and take nasty
2005 decisions. */
2006 if (RTX_UNCHANGING_P (dst))
2007 add_function_usage_to
2008 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
2009 gen_rtx_CLOBBER (VOIDmode, dst),
2010 NULL_RTX));
2012 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
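/* Editorial sketch, not part of the original expr.c.  It shows, at the
   source level, the two argument orders built above; the function and
   variable names are invented for illustration.  */
#include <string.h>   /* memcpy */
#include <strings.h>  /* bcopy */

static void
block_move_libcall_example (void *dst, const void *src, size_t n)
{
  /* TARGET_MEM_FUNCTIONS: memcpy (dest, source, size); its return value
     (DEST) is what the code above hands back in RETVAL.  */
  void *ret = memcpy (dst, src, n);

  /* Otherwise: bcopy (source, dest, size); it returns nothing, which is
     why NULL_RTX is returned in that case.  */
  bcopy (src, dst, n);

  (void) ret;
}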
2015 /* A subroutine of emit_block_move_via_libcall. Create the tree node
2016 for the function we use for block copies. The first time FOR_CALL
2017 is true, we call assemble_external. */
2019 static GTY(()) tree block_move_fn;
2021 void
2022 init_block_move_fn (const char *asmspec)
2024 if (!block_move_fn)
2026 tree args, fn;
2028 if (TARGET_MEM_FUNCTIONS)
2030 fn = get_identifier ("memcpy");
2031 args = build_function_type_list (ptr_type_node, ptr_type_node,
2032 const_ptr_type_node, sizetype,
2033 NULL_TREE);
2035 else
2037 fn = get_identifier ("bcopy");
2038 args = build_function_type_list (void_type_node, const_ptr_type_node,
2039 ptr_type_node, unsigned_type_node,
2040 NULL_TREE);
2043 fn = build_decl (FUNCTION_DECL, fn, args);
2044 DECL_EXTERNAL (fn) = 1;
2045 TREE_PUBLIC (fn) = 1;
2046 DECL_ARTIFICIAL (fn) = 1;
2047 TREE_NOTHROW (fn) = 1;
2049 block_move_fn = fn;
2052 if (asmspec)
2054 SET_DECL_RTL (block_move_fn, NULL_RTX);
2055 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
2059 static tree
2060 emit_block_move_libcall_fn (int for_call)
2062 static bool emitted_extern;
2064 if (!block_move_fn)
2065 init_block_move_fn (NULL);
2067 if (for_call && !emitted_extern)
2069 emitted_extern = true;
2070 make_decl_rtl (block_move_fn, NULL);
2071 assemble_external (block_move_fn);
2074 return block_move_fn;
2077 /* A subroutine of emit_block_move. Copy the data via an explicit
2078 loop. This is used only when libcalls are forbidden. */
2079 /* ??? It'd be nice to copy in hunks larger than QImode. */
2081 static void
2082 emit_block_move_via_loop (rtx x, rtx y, rtx size,
2083 unsigned int align ATTRIBUTE_UNUSED)
2085 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2086 enum machine_mode iter_mode;
2088 iter_mode = GET_MODE (size);
2089 if (iter_mode == VOIDmode)
2090 iter_mode = word_mode;
2092 top_label = gen_label_rtx ();
2093 cmp_label = gen_label_rtx ();
2094 iter = gen_reg_rtx (iter_mode);
2096 emit_move_insn (iter, const0_rtx);
2098 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2099 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2100 do_pending_stack_adjust ();
2102 emit_note (NOTE_INSN_LOOP_BEG);
2104 emit_jump (cmp_label);
2105 emit_label (top_label);
2107 tmp = convert_modes (Pmode, iter_mode, iter, true);
2108 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2109 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2110 x = change_address (x, QImode, x_addr);
2111 y = change_address (y, QImode, y_addr);
2113 emit_move_insn (x, y);
2115 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2116 true, OPTAB_LIB_WIDEN);
2117 if (tmp != iter)
2118 emit_move_insn (iter, tmp);
2120 emit_note (NOTE_INSN_LOOP_CONT);
2121 emit_label (cmp_label);
2123 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2124 true, top_label);
2126 emit_note (NOTE_INSN_LOOP_END);
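/* Editorial sketch, not part of the original expr.c.  The RTL emitted by
   emit_block_move_via_loop behaves like this byte-at-a-time loop; the
   names are invented for illustration.  */
static void
block_move_loop_example (unsigned char *x, const unsigned char *y,
                         unsigned long size)
{
  unsigned long iter = 0;

  goto cmp;                     /* emit_jump (cmp_label) */
 top:
  x[iter] = y[iter];            /* QImode move from y_addr+iter to x_addr+iter */
  iter += 1;                    /* expand_simple_binop (..., PLUS, ...) */
 cmp:
  if (iter < size)              /* emit_cmp_and_jump_insns (..., LT, ...) */
    goto top;
}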
2129 /* Copy all or part of a value X into registers starting at REGNO.
2130 The number of registers to be filled is NREGS. */
2132 void
2133 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
2135 int i;
2136 #ifdef HAVE_load_multiple
2137 rtx pat;
2138 rtx last;
2139 #endif
2141 if (nregs == 0)
2142 return;
2144 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2145 x = validize_mem (force_const_mem (mode, x));
2147 /* See if the machine can do this with a load multiple insn. */
2148 #ifdef HAVE_load_multiple
2149 if (HAVE_load_multiple)
2151 last = get_last_insn ();
2152 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2153 GEN_INT (nregs));
2154 if (pat)
2156 emit_insn (pat);
2157 return;
2159 else
2160 delete_insns_since (last);
2162 #endif
2164 for (i = 0; i < nregs; i++)
2165 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2166 operand_subword_force (x, i, mode));
2169 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2170 The number of registers to be filled is NREGS. */
2172 void
2173 move_block_from_reg (int regno, rtx x, int nregs)
2175 int i;
2177 if (nregs == 0)
2178 return;
2180 /* See if the machine can do this with a store multiple insn. */
2181 #ifdef HAVE_store_multiple
2182 if (HAVE_store_multiple)
2184 rtx last = get_last_insn ();
2185 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2186 GEN_INT (nregs));
2187 if (pat)
2189 emit_insn (pat);
2190 return;
2192 else
2193 delete_insns_since (last);
2195 #endif
2197 for (i = 0; i < nregs; i++)
2199 rtx tem = operand_subword (x, i, 1, BLKmode);
2201 if (tem == 0)
2202 abort ();
2204 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2208 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2209 ORIG, where ORIG is a non-consecutive group of registers represented by
2210 a PARALLEL. The clone is identical to the original except in that the
2211 original set of registers is replaced by a new set of pseudo registers.
2212 The new set has the same modes as the original set. */
2214 rtx
2215 gen_group_rtx (rtx orig)
2217 int i, length;
2218 rtx *tmps;
2220 if (GET_CODE (orig) != PARALLEL)
2221 abort ();
2223 length = XVECLEN (orig, 0);
2224 tmps = (rtx *) alloca (sizeof (rtx) * length);
2226 /* Skip a NULL entry in first slot. */
2227 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2229 if (i)
2230 tmps[0] = 0;
2232 for (; i < length; i++)
2234 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2235 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2237 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2240 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
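/* Editorial sketch, not part of the original expr.c.  This is the
   PARALLEL shape that gen_group_rtx, emit_group_load and emit_group_store
   operate on, shown for a hypothetical 16-byte value split across two
   64-bit registers (the pseudo register numbers are invented):

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   Each element pairs a register, XEXP (elt, 0), with the byte offset of
   the piece it carries, XEXP (elt, 1); a NULL register in the first slot
   marks a parameter that also lives partly on the stack.  */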
2243 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2244 registers represented by a PARALLEL. SSIZE represents the total size of
2245 block SRC in bytes, or -1 if not known. */
2246 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2247 the balance will be in what would be the low-order memory addresses, i.e.
2248 left justified for big endian, right justified for little endian. This
2249 happens to be true for the targets currently using this support. If this
2250 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2251 would be needed. */
2253 void
2254 emit_group_load (rtx dst, rtx orig_src, int ssize)
2256 rtx *tmps, src;
2257 int start, i;
2259 if (GET_CODE (dst) != PARALLEL)
2260 abort ();
2262 /* Check for a NULL entry, used to indicate that the parameter goes
2263 both on the stack and in registers. */
2264 if (XEXP (XVECEXP (dst, 0, 0), 0))
2265 start = 0;
2266 else
2267 start = 1;
2269 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2271 /* Process the pieces. */
2272 for (i = start; i < XVECLEN (dst, 0); i++)
2274 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2275 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2276 unsigned int bytelen = GET_MODE_SIZE (mode);
2277 int shift = 0;
2279 /* Handle trailing fragments that run over the size of the struct. */
2280 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2282 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2283 bytelen = ssize - bytepos;
2284 if (bytelen <= 0)
2285 abort ();
2288 /* If we won't be loading directly from memory, protect the real source
2289 from strange tricks we might play; but make sure that the source can
2290 be loaded directly into the destination. */
2291 src = orig_src;
2292 if (GET_CODE (orig_src) != MEM
2293 && (!CONSTANT_P (orig_src)
2294 || (GET_MODE (orig_src) != mode
2295 && GET_MODE (orig_src) != VOIDmode)))
2297 if (GET_MODE (orig_src) == VOIDmode)
2298 src = gen_reg_rtx (mode);
2299 else
2300 src = gen_reg_rtx (GET_MODE (orig_src));
2302 emit_move_insn (src, orig_src);
2305 /* Optimize the access just a bit. */
2306 if (GET_CODE (src) == MEM
2307 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2308 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2309 && bytelen == GET_MODE_SIZE (mode))
2311 tmps[i] = gen_reg_rtx (mode);
2312 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2314 else if (GET_CODE (src) == CONCAT)
2316 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2317 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2319 if ((bytepos == 0 && bytelen == slen0)
2320 || (bytepos != 0 && bytepos + bytelen <= slen))
2322 /* The following assumes that the concatenated objects all
2323 have the same size. In this case, a simple calculation
2324 can be used to determine the object and the bit field
2325 to be extracted. */
2326 tmps[i] = XEXP (src, bytepos / slen0);
2327 if (! CONSTANT_P (tmps[i])
2328 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2329 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2330 (bytepos % slen0) * BITS_PER_UNIT,
2331 1, NULL_RTX, mode, mode, ssize);
2333 else if (bytepos == 0)
2335 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2336 emit_move_insn (mem, src);
2337 tmps[i] = adjust_address (mem, mode, 0);
2339 else
2340 abort ();
2342 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2343 SIMD register, which is currently broken. While we get GCC
2344 to emit proper RTL for these cases, let's dump to memory. */
2345 else if (VECTOR_MODE_P (GET_MODE (dst))
2346 && GET_CODE (src) == REG)
2348 int slen = GET_MODE_SIZE (GET_MODE (src));
2349 rtx mem;
2351 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2352 emit_move_insn (mem, src);
2353 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2355 else if (CONSTANT_P (src)
2356 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2357 tmps[i] = src;
2358 else
2359 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2360 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2361 mode, mode, ssize);
2363 if (BYTES_BIG_ENDIAN && shift)
2364 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2365 tmps[i], 0, OPTAB_WIDEN);
2368 emit_queue ();
2370 /* Copy the extracted pieces into the proper (probable) hard regs. */
2371 for (i = start; i < XVECLEN (dst, 0); i++)
2372 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2375 /* Emit code to move a block SRC to block DST, where SRC and DST are
2376 non-consecutive groups of registers, each represented by a PARALLEL. */
2378 void
2379 emit_group_move (rtx dst, rtx src)
2381 int i;
2383 if (GET_CODE (src) != PARALLEL
2384 || GET_CODE (dst) != PARALLEL
2385 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2386 abort ();
2388 /* Skip first entry if NULL. */
2389 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2390 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2391 XEXP (XVECEXP (src, 0, i), 0));
2394 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2395 registers represented by a PARALLEL. SSIZE represents the total size of
2396 block DST, or -1 if not known. */
2398 void
2399 emit_group_store (rtx orig_dst, rtx src, int ssize)
2401 rtx *tmps, dst;
2402 int start, i;
2404 if (GET_CODE (src) != PARALLEL)
2405 abort ();
2407 /* Check for a NULL entry, used to indicate that the parameter goes
2408 both on the stack and in registers. */
2409 if (XEXP (XVECEXP (src, 0, 0), 0))
2410 start = 0;
2411 else
2412 start = 1;
2414 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2416 /* Copy the (probable) hard regs into pseudos. */
2417 for (i = start; i < XVECLEN (src, 0); i++)
2419 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2420 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2421 emit_move_insn (tmps[i], reg);
2423 emit_queue ();
2425 /* If we won't be storing directly into memory, protect the real destination
2426 from strange tricks we might play. */
2427 dst = orig_dst;
2428 if (GET_CODE (dst) == PARALLEL)
2430 rtx temp;
2432 /* We can get a PARALLEL dst if there is a conditional expression in
2433 a return statement. In that case, the dst and src are the same,
2434 so no action is necessary. */
2435 if (rtx_equal_p (dst, src))
2436 return;
2438 /* It is unclear if we can ever reach here, but we may as well handle
2439 it. Allocate a temporary, and split this into a store/load to/from
2440 the temporary. */
2442 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2443 emit_group_store (temp, src, ssize);
2444 emit_group_load (dst, temp, ssize);
2445 return;
2447 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2449 dst = gen_reg_rtx (GET_MODE (orig_dst));
2450 /* Make life a bit easier for combine. */
2451 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2454 /* Process the pieces. */
2455 for (i = start; i < XVECLEN (src, 0); i++)
2457 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2458 enum machine_mode mode = GET_MODE (tmps[i]);
2459 unsigned int bytelen = GET_MODE_SIZE (mode);
2460 rtx dest = dst;
2462 /* Handle trailing fragments that run over the size of the struct. */
2463 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2465 if (BYTES_BIG_ENDIAN)
2467 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2468 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2469 tmps[i], 0, OPTAB_WIDEN);
2471 bytelen = ssize - bytepos;
2474 if (GET_CODE (dst) == CONCAT)
2476 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2477 dest = XEXP (dst, 0);
2478 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2480 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2481 dest = XEXP (dst, 1);
2483 else if (bytepos == 0 && XVECLEN (src, 0))
2485 dest = assign_stack_temp (GET_MODE (dest),
2486 GET_MODE_SIZE (GET_MODE (dest)), 0);
2487 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2488 tmps[i]);
2489 dst = dest;
2490 break;
2492 else
2493 abort ();
2496 /* Optimize the access just a bit. */
2497 if (GET_CODE (dest) == MEM
2498 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2499 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2500 && bytelen == GET_MODE_SIZE (mode))
2501 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2502 else
2503 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2504 mode, tmps[i], ssize);
2507 emit_queue ();
2509 /* Copy from the pseudo into the (probable) hard reg. */
2510 if (orig_dst != dst)
2511 emit_move_insn (orig_dst, dst);
2514 /* Generate code to copy a BLKmode object of TYPE out of a
2515 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2516 is null, a stack temporary is created. TGTBLK is returned.
2518 The primary purpose of this routine is to handle functions
2519 that return BLKmode structures in registers. Some machines
2520 (the PA for example) want to return all small structures
2521 in registers regardless of the structure's alignment. */
2523 rtx
2524 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2526 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2527 rtx src = NULL, dst = NULL;
2528 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2529 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2531 if (tgtblk == 0)
2533 tgtblk = assign_temp (build_qualified_type (type,
2534 (TYPE_QUALS (type)
2535 | TYPE_QUAL_CONST)),
2536 0, 1, 1);
2537 preserve_temp_slots (tgtblk);
2540 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2541 into a new pseudo which is a full word. */
2543 if (GET_MODE (srcreg) != BLKmode
2544 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2545 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2547 /* Structures whose size is not a multiple of a word are aligned
2548 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2549 machine, this means we must skip the empty high order bytes when
2550 calculating the bit offset. */
2551 if (BYTES_BIG_ENDIAN
2552 && bytes % UNITS_PER_WORD)
2553 big_endian_correction
2554 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2556 /* Copy the structure BITSIZE bits at a time.
2558 We could probably emit more efficient code for machines which do not use
2559 strict alignment, but it doesn't seem worth the effort at the current
2560 time. */
2561 for (bitpos = 0, xbitpos = big_endian_correction;
2562 bitpos < bytes * BITS_PER_UNIT;
2563 bitpos += bitsize, xbitpos += bitsize)
2565 /* We need a new source operand each time xbitpos is on a
2566 word boundary and when xbitpos == big_endian_correction
2567 (the first time through). */
2568 if (xbitpos % BITS_PER_WORD == 0
2569 || xbitpos == big_endian_correction)
2570 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2571 GET_MODE (srcreg));
2573 /* We need a new destination operand each time bitpos is on
2574 a word boundary. */
2575 if (bitpos % BITS_PER_WORD == 0)
2576 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2578 /* Use xbitpos for the source extraction (right justified) and
2579 bitpos for the destination store (left justified). */
2580 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2581 extract_bit_field (src, bitsize,
2582 xbitpos % BITS_PER_WORD, 1,
2583 NULL_RTX, word_mode, word_mode,
2584 BITS_PER_WORD),
2585 BITS_PER_WORD);
2588 return tgtblk;
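/* Editorial sketch, not part of the original expr.c.  It recomputes the
   big-endian correction used above for a hypothetical 32-bit big-endian
   target (UNITS_PER_WORD == 4, BITS_PER_UNIT == 8); the function name and
   constants are assumptions made only for illustration.  */
static unsigned int
big_endian_correction_example (unsigned int bytes)
{
  const unsigned int units_per_word = 4;
  const unsigned int bits_per_word = 32;
  const unsigned int bits_per_unit = 8;

  if (bytes % units_per_word == 0)
    return 0;

  /* A 6-byte structure leaves 2 significant bytes in its last word, so
     extraction must skip 32 - 2*8 = 16 empty high-order bits.  */
  return bits_per_word - (bytes % units_per_word) * bits_per_unit;
}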
2591 /* Add a USE expression for REG to the (possibly empty) list pointed
2592 to by CALL_FUSAGE. REG must denote a hard register. */
2594 void
2595 use_reg (rtx *call_fusage, rtx reg)
2597 if (GET_CODE (reg) != REG
2598 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2599 abort ();
2601 *call_fusage
2602 = gen_rtx_EXPR_LIST (VOIDmode,
2603 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2606 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2607 starting at REGNO. All of these registers must be hard registers. */
2609 void
2610 use_regs (rtx *call_fusage, int regno, int nregs)
2612 int i;
2614 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2615 abort ();
2617 for (i = 0; i < nregs; i++)
2618 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2621 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2622 PARALLEL REGS. This is for calls that pass values in multiple
2623 non-contiguous locations. The Irix 6 ABI has examples of this. */
2625 void
2626 use_group_regs (rtx *call_fusage, rtx regs)
2628 int i;
2630 for (i = 0; i < XVECLEN (regs, 0); i++)
2632 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2634 /* A NULL entry means the parameter goes both on the stack and in
2635 registers. This can also be a MEM for targets that pass values
2636 partially on the stack and partially in registers. */
2637 if (reg != 0 && GET_CODE (reg) == REG)
2638 use_reg (call_fusage, reg);
2643 /* Determine whether the LEN bytes generated by CONSTFUN can be
2644 stored to memory using several move instructions. CONSTFUNDATA is
2645 a pointer which will be passed as argument in every CONSTFUN call.
2646 ALIGN is maximum alignment we can assume. Return nonzero if a
2647 call to store_by_pieces should succeed. */
2649 int
2650 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2651 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2652 void *constfundata, unsigned int align)
2654 unsigned HOST_WIDE_INT max_size, l;
2655 HOST_WIDE_INT offset = 0;
2656 enum machine_mode mode, tmode;
2657 enum insn_code icode;
2658 int reverse;
2659 rtx cst;
2661 if (len == 0)
2662 return 1;
2664 if (! STORE_BY_PIECES_P (len, align))
2665 return 0;
2667 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2668 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2669 align = MOVE_MAX * BITS_PER_UNIT;
2671 /* We would first store what we can in the largest integer mode, then go to
2672 successively smaller modes. */
2674 for (reverse = 0;
2675 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2676 reverse++)
2678 l = len;
2679 mode = VOIDmode;
2680 max_size = STORE_MAX_PIECES + 1;
2681 while (max_size > 1)
2683 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2684 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2685 if (GET_MODE_SIZE (tmode) < max_size)
2686 mode = tmode;
2688 if (mode == VOIDmode)
2689 break;
2691 icode = mov_optab->handlers[(int) mode].insn_code;
2692 if (icode != CODE_FOR_nothing
2693 && align >= GET_MODE_ALIGNMENT (mode))
2695 unsigned int size = GET_MODE_SIZE (mode);
2697 while (l >= size)
2699 if (reverse)
2700 offset -= size;
2702 cst = (*constfun) (constfundata, offset, mode);
2703 if (!LEGITIMATE_CONSTANT_P (cst))
2704 return 0;
2706 if (!reverse)
2707 offset += size;
2709 l -= size;
2713 max_size = GET_MODE_SIZE (mode);
2716 /* The code above should have handled everything. */
2717 if (l != 0)
2718 abort ();
2721 return 1;
2724 /* Generate several move instructions to store LEN bytes generated by
2725 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2726 pointer which will be passed as argument in every CONSTFUN call.
2727 ALIGN is maximum alignment we can assume.
2728 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2729 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2730 stpcpy. */
2732 rtx
2733 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2734 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2735 void *constfundata, unsigned int align, int endp)
2737 struct store_by_pieces data;
2739 if (len == 0)
2741 if (endp == 2)
2742 abort ();
2743 return to;
2746 if (! STORE_BY_PIECES_P (len, align))
2747 abort ();
2748 to = protect_from_queue (to, 1);
2749 data.constfun = constfun;
2750 data.constfundata = constfundata;
2751 data.len = len;
2752 data.to = to;
2753 store_by_pieces_1 (&data, align);
2754 if (endp)
2756 rtx to1;
2758 if (data.reverse)
2759 abort ();
2760 if (data.autinc_to)
2762 if (endp == 2)
2764 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2765 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2766 else
2767 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2768 -1));
2770 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2771 data.offset);
2773 else
2775 if (endp == 2)
2776 --data.offset;
2777 to1 = adjust_address (data.to, QImode, data.offset);
2779 return to1;
2781 else
2782 return data.to;
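/* Editorial sketch, not part of the original expr.c.  It shows what the
   three ENDP conventions correspond to at the source level for a
   hypothetical string copy where N counts the terminating NUL; the names
   are invented for illustration.  */
#include <string.h>

static void
endp_example (char *dst, const char *src)
{
  size_t n = strlen (src) + 1;

  char *p0 = (char *) memcpy (dst, src, n);  /* ENDP == 0: TO itself */
  char *p1 = p0 + n;                         /* ENDP == 1: the end, ala mempcpy */
  char *p2 = p0 + n - 1;                     /* ENDP == 2: the NUL, ala stpcpy */

  (void) p1;
  (void) p2;
}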
2785 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2786 rtx with BLKmode). The caller must pass TO through protect_from_queue
2787 before calling. ALIGN is maximum alignment we can assume. */
2789 static void
2790 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2792 struct store_by_pieces data;
2794 if (len == 0)
2795 return;
2797 data.constfun = clear_by_pieces_1;
2798 data.constfundata = NULL;
2799 data.len = len;
2800 data.to = to;
2801 store_by_pieces_1 (&data, align);
2804 /* Callback routine for clear_by_pieces.
2805 Return const0_rtx unconditionally. */
2807 static rtx
2808 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2809 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2810 enum machine_mode mode ATTRIBUTE_UNUSED)
2812 return const0_rtx;
2815 /* Subroutine of clear_by_pieces and store_by_pieces.
2816 Generate several move instructions to store LEN bytes of block TO. (A MEM
2817 rtx with BLKmode). The caller must pass TO through protect_from_queue
2818 before calling. ALIGN is maximum alignment we can assume. */
2820 static void
2821 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2822 unsigned int align ATTRIBUTE_UNUSED)
2824 rtx to_addr = XEXP (data->to, 0);
2825 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2826 enum machine_mode mode = VOIDmode, tmode;
2827 enum insn_code icode;
2829 data->offset = 0;
2830 data->to_addr = to_addr;
2831 data->autinc_to
2832 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2833 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2835 data->explicit_inc_to = 0;
2836 data->reverse
2837 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2838 if (data->reverse)
2839 data->offset = data->len;
2841 /* If storing requires more than two move insns,
2842 copy addresses to registers (to make displacements shorter)
2843 and use post-increment if available. */
2844 if (!data->autinc_to
2845 && move_by_pieces_ninsns (data->len, align) > 2)
2847 /* Determine the main mode we'll be using. */
2848 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2849 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2850 if (GET_MODE_SIZE (tmode) < max_size)
2851 mode = tmode;
2853 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2855 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2856 data->autinc_to = 1;
2857 data->explicit_inc_to = -1;
2860 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2861 && ! data->autinc_to)
2863 data->to_addr = copy_addr_to_reg (to_addr);
2864 data->autinc_to = 1;
2865 data->explicit_inc_to = 1;
2868 if ( !data->autinc_to && CONSTANT_P (to_addr))
2869 data->to_addr = copy_addr_to_reg (to_addr);
2872 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2873 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2874 align = MOVE_MAX * BITS_PER_UNIT;
2876 /* First store what we can in the largest integer mode, then go to
2877 successively smaller modes. */
2879 while (max_size > 1)
2881 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2882 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2883 if (GET_MODE_SIZE (tmode) < max_size)
2884 mode = tmode;
2886 if (mode == VOIDmode)
2887 break;
2889 icode = mov_optab->handlers[(int) mode].insn_code;
2890 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2891 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2893 max_size = GET_MODE_SIZE (mode);
2896 /* The code above should have handled everything. */
2897 if (data->len != 0)
2898 abort ();
2901 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2902 with move instructions for mode MODE. GENFUN is the gen_... function
2903 to make a move insn for that mode. DATA has all the other info. */
2905 static void
2906 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2907 struct store_by_pieces *data)
2909 unsigned int size = GET_MODE_SIZE (mode);
2910 rtx to1, cst;
2912 while (data->len >= size)
2914 if (data->reverse)
2915 data->offset -= size;
2917 if (data->autinc_to)
2918 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2919 data->offset);
2920 else
2921 to1 = adjust_address (data->to, mode, data->offset);
2923 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2924 emit_insn (gen_add2_insn (data->to_addr,
2925 GEN_INT (-(HOST_WIDE_INT) size)));
2927 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2928 emit_insn ((*genfun) (to1, cst));
2930 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2931 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2933 if (! data->reverse)
2934 data->offset += size;
2936 data->len -= size;
2940 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2941 its length in bytes. */
2943 rtx
2944 clear_storage (rtx object, rtx size)
2946 rtx retval = 0;
2947 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2948 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2950 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2951 just move a zero. Otherwise, do this a piece at a time. */
2952 if (GET_MODE (object) != BLKmode
2953 && GET_CODE (size) == CONST_INT
2954 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2955 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2956 else
2958 object = protect_from_queue (object, 1);
2959 size = protect_from_queue (size, 0);
2961 if (GET_CODE (size) == CONST_INT && INTVAL (size) == 0)
2963 else if (GET_CODE (size) == CONST_INT
2964 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2965 clear_by_pieces (object, INTVAL (size), align);
2966 else if (clear_storage_via_clrstr (object, size, align))
2968 else
2969 retval = clear_storage_via_libcall (object, size);
2972 return retval;
2975 /* A subroutine of clear_storage. Expand a clrstr pattern;
2976 return true if successful. */
2978 static bool
2979 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2981 /* Try the most limited insn first, because there's no point
2982 including more than one in the machine description unless
2983 the more limited one has some advantage. */
2985 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2986 enum machine_mode mode;
2988 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2989 mode = GET_MODE_WIDER_MODE (mode))
2991 enum insn_code code = clrstr_optab[(int) mode];
2992 insn_operand_predicate_fn pred;
2994 if (code != CODE_FOR_nothing
2995 /* We don't need MODE to be narrower than
2996 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2997 the mode mask, as it is returned by the macro, it will
2998 definitely be less than the actual mode mask. */
2999 && ((GET_CODE (size) == CONST_INT
3000 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3001 <= (GET_MODE_MASK (mode) >> 1)))
3002 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3003 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3004 || (*pred) (object, BLKmode))
3005 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3006 || (*pred) (opalign, VOIDmode)))
3008 rtx op1;
3009 rtx last = get_last_insn ();
3010 rtx pat;
3012 op1 = convert_to_mode (mode, size, 1);
3013 pred = insn_data[(int) code].operand[1].predicate;
3014 if (pred != 0 && ! (*pred) (op1, mode))
3015 op1 = copy_to_mode_reg (mode, op1);
3017 pat = GEN_FCN ((int) code) (object, op1, opalign);
3018 if (pat)
3020 emit_insn (pat);
3021 return true;
3023 else
3024 delete_insns_since (last);
3028 return false;
3031 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3032 Return the return value of memset, 0 otherwise. */
3034 static rtx
3035 clear_storage_via_libcall (rtx object, rtx size)
3037 tree call_expr, arg_list, fn, object_tree, size_tree;
3038 enum machine_mode size_mode;
3039 rtx retval;
3041 /* OBJECT or SIZE may have been passed through protect_from_queue.
3043 It is unsafe to save the value generated by protect_from_queue
3044 and reuse it later. Consider what happens if emit_queue is
3045 called before the return value from protect_from_queue is used.
3047 Expansion of the CALL_EXPR below will call emit_queue before
3048 we are finished emitting RTL for argument setup. So if we are
3049 not careful we could get the wrong value for an argument.
3051 To avoid this problem we go ahead and emit code to copy OBJECT
3052 and SIZE into new pseudos. We can then place those new pseudos
3053 into an RTL_EXPR and use them later, even after a call to
3054 emit_queue.
3056 Note this is not strictly needed for library calls since they
3057 do not call emit_queue before loading their arguments. However,
3058 we may need to have library calls call emit_queue in the future
3059 since failing to do so could cause problems for targets which
3060 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3062 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3064 if (TARGET_MEM_FUNCTIONS)
3065 size_mode = TYPE_MODE (sizetype);
3066 else
3067 size_mode = TYPE_MODE (unsigned_type_node);
3068 size = convert_to_mode (size_mode, size, 1);
3069 size = copy_to_mode_reg (size_mode, size);
3071 /* It is incorrect to use the libcall calling conventions to call
3072 memset in this context. This could be a user call to memset and
3073 the user may wish to examine the return value from memset. For
3074 targets where libcalls and normal calls have different conventions
3075 for returning pointers, we could end up generating incorrect code.
3077 For convenience, we generate the call to bzero this way as well. */
3079 object_tree = make_tree (ptr_type_node, object);
3080 if (TARGET_MEM_FUNCTIONS)
3081 size_tree = make_tree (sizetype, size);
3082 else
3083 size_tree = make_tree (unsigned_type_node, size);
3085 fn = clear_storage_libcall_fn (true);
3086 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3087 if (TARGET_MEM_FUNCTIONS)
3088 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3089 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3091 /* Now we have to build up the CALL_EXPR itself. */
3092 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3093 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3094 call_expr, arg_list, NULL_TREE);
3095 TREE_SIDE_EFFECTS (call_expr) = 1;
3097 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3099 /* If we are initializing a readonly value, show the above call
3100 clobbered it. Otherwise, a load from it may erroneously be
3101 hoisted from a loop. */
3102 if (RTX_UNCHANGING_P (object))
3103 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3105 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
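/* Editorial sketch, not part of the original expr.c.  The argument lists
   built above correspond to these source-level calls; the names are
   invented for illustration.  */
#include <string.h>   /* memset */
#include <strings.h>  /* bzero */

static void
block_clear_example (void *p, size_t n)
{
  /* TARGET_MEM_FUNCTIONS: memset (object, 0, size); returns OBJECT.  */
  void *ret = memset (p, 0, n);

  /* Otherwise: bzero (object, size); returns nothing, hence NULL_RTX.  */
  bzero (p, n);

  (void) ret;
}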
3108 /* A subroutine of clear_storage_via_libcall. Create the tree node
3109 for the function we use for block clears. The first time FOR_CALL
3110 is true, we call assemble_external. */
3112 static GTY(()) tree block_clear_fn;
3114 void
3115 init_block_clear_fn (const char *asmspec)
3117 if (!block_clear_fn)
3119 tree fn, args;
3121 if (TARGET_MEM_FUNCTIONS)
3123 fn = get_identifier ("memset");
3124 args = build_function_type_list (ptr_type_node, ptr_type_node,
3125 integer_type_node, sizetype,
3126 NULL_TREE);
3128 else
3130 fn = get_identifier ("bzero");
3131 args = build_function_type_list (void_type_node, ptr_type_node,
3132 unsigned_type_node, NULL_TREE);
3135 fn = build_decl (FUNCTION_DECL, fn, args);
3136 DECL_EXTERNAL (fn) = 1;
3137 TREE_PUBLIC (fn) = 1;
3138 DECL_ARTIFICIAL (fn) = 1;
3139 TREE_NOTHROW (fn) = 1;
3141 block_clear_fn = fn;
3144 if (asmspec)
3146 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3147 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3151 static tree
3152 clear_storage_libcall_fn (int for_call)
3154 static bool emitted_extern;
3156 if (!block_clear_fn)
3157 init_block_clear_fn (NULL);
3159 if (for_call && !emitted_extern)
3161 emitted_extern = true;
3162 make_decl_rtl (block_clear_fn, NULL);
3163 assemble_external (block_clear_fn);
3166 return block_clear_fn;
3169 /* Generate code to copy Y into X.
3170 Both Y and X must have the same mode, except that
3171 Y can be a constant with VOIDmode.
3172 This mode cannot be BLKmode; use emit_block_move for that.
3174 Return the last instruction emitted. */
3176 rtx
3177 emit_move_insn (rtx x, rtx y)
3179 enum machine_mode mode = GET_MODE (x);
3180 rtx y_cst = NULL_RTX;
3181 rtx last_insn, set;
3183 x = protect_from_queue (x, 1);
3184 y = protect_from_queue (y, 0);
3186 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3187 abort ();
3189 /* Never force constant_p_rtx to memory. */
3190 if (GET_CODE (y) == CONSTANT_P_RTX)
3192 else if (CONSTANT_P (y))
3194 if (optimize
3195 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3196 && (last_insn = compress_float_constant (x, y)))
3197 return last_insn;
3199 y_cst = y;
3201 if (!LEGITIMATE_CONSTANT_P (y))
3203 y = force_const_mem (mode, y);
3205 /* If the target's cannot_force_const_mem prevented the spill,
3206 assume that the target's move expanders will also take care
3207 of the non-legitimate constant. */
3208 if (!y)
3209 y = y_cst;
3213 /* If X or Y are memory references, verify that their addresses are valid
3214 for the machine. */
3215 if (GET_CODE (x) == MEM
3216 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3217 && ! push_operand (x, GET_MODE (x)))
3218 || (flag_force_addr
3219 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3220 x = validize_mem (x);
3222 if (GET_CODE (y) == MEM
3223 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3224 || (flag_force_addr
3225 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3226 y = validize_mem (y);
3228 if (mode == BLKmode)
3229 abort ();
3231 last_insn = emit_move_insn_1 (x, y);
3233 if (y_cst && GET_CODE (x) == REG
3234 && (set = single_set (last_insn)) != NULL_RTX
3235 && SET_DEST (set) == x
3236 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3237 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3239 return last_insn;
3242 /* Low level part of emit_move_insn.
3243 Called just like emit_move_insn, but assumes X and Y
3244 are basically valid. */
3246 rtx
3247 emit_move_insn_1 (rtx x, rtx y)
3249 enum machine_mode mode = GET_MODE (x);
3250 enum machine_mode submode;
3251 enum mode_class class = GET_MODE_CLASS (mode);
3253 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3254 abort ();
3256 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3257 return
3258 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3260 /* Expand complex moves by moving real part and imag part, if possible. */
3261 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3262 && BLKmode != (submode = GET_MODE_INNER (mode))
3263 && (mov_optab->handlers[(int) submode].insn_code
3264 != CODE_FOR_nothing))
3266 /* Don't split destination if it is a stack push. */
3267 int stack = push_operand (x, GET_MODE (x));
3269 #ifdef PUSH_ROUNDING
3270 /* If we are pushing onto the stack but the part's size is not what the
3271 machine pushes exactly (PUSH_ROUNDING pads it), use move instructions. */
3272 if (stack
3273 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3274 != GET_MODE_SIZE (submode)))
3276 rtx temp;
3277 HOST_WIDE_INT offset1, offset2;
3279 /* Do not use anti_adjust_stack, since we don't want to update
3280 stack_pointer_delta. */
3281 temp = expand_binop (Pmode,
3282 #ifdef STACK_GROWS_DOWNWARD
3283 sub_optab,
3284 #else
3285 add_optab,
3286 #endif
3287 stack_pointer_rtx,
3288 GEN_INT
3289 (PUSH_ROUNDING
3290 (GET_MODE_SIZE (GET_MODE (x)))),
3291 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3293 if (temp != stack_pointer_rtx)
3294 emit_move_insn (stack_pointer_rtx, temp);
3296 #ifdef STACK_GROWS_DOWNWARD
3297 offset1 = 0;
3298 offset2 = GET_MODE_SIZE (submode);
3299 #else
3300 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3301 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3302 + GET_MODE_SIZE (submode));
3303 #endif
3305 emit_move_insn (change_address (x, submode,
3306 gen_rtx_PLUS (Pmode,
3307 stack_pointer_rtx,
3308 GEN_INT (offset1))),
3309 gen_realpart (submode, y));
3310 emit_move_insn (change_address (x, submode,
3311 gen_rtx_PLUS (Pmode,
3312 stack_pointer_rtx,
3313 GEN_INT (offset2))),
3314 gen_imagpart (submode, y));
3316 else
3317 #endif
3318 /* If this is a stack, push the highpart first, so it
3319 will be in the argument order.
3321 In that case, change_address is used only to convert
3322 the mode, not to change the address. */
3323 if (stack)
3325 /* Note that the real part always precedes the imag part in memory
3326 regardless of machine's endianness. */
3327 #ifdef STACK_GROWS_DOWNWARD
3328 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3329 gen_imagpart (submode, y));
3330 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3331 gen_realpart (submode, y));
3332 #else
3333 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3334 gen_realpart (submode, y));
3335 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3336 gen_imagpart (submode, y));
3337 #endif
3339 else
3341 rtx realpart_x, realpart_y;
3342 rtx imagpart_x, imagpart_y;
3344 /* If this is a complex value with each part being smaller than a
3345 word, the usual calling sequence will likely pack the pieces into
3346 a single register. Unfortunately, SUBREG of hard registers only
3347 deals in terms of words, so we have a problem converting input
3348 arguments to the CONCAT of two registers that is used elsewhere
3349 for complex values. If this is before reload, we can copy it into
3350 memory and reload. FIXME, we should see about using extract and
3351 insert on integer registers, but complex short and complex char
3352 variables should be rarely used. */
3353 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3354 && (reload_in_progress | reload_completed) == 0)
3356 int packed_dest_p
3357 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3358 int packed_src_p
3359 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3361 if (packed_dest_p || packed_src_p)
3363 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3364 ? MODE_FLOAT : MODE_INT);
3366 enum machine_mode reg_mode
3367 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3369 if (reg_mode != BLKmode)
3371 rtx mem = assign_stack_temp (reg_mode,
3372 GET_MODE_SIZE (mode), 0);
3373 rtx cmem = adjust_address (mem, mode, 0);
3375 cfun->cannot_inline
3376 = N_("function using short complex types cannot be inline");
3378 if (packed_dest_p)
3380 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3382 emit_move_insn_1 (cmem, y);
3383 return emit_move_insn_1 (sreg, mem);
3385 else
3387 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3389 emit_move_insn_1 (mem, sreg);
3390 return emit_move_insn_1 (x, cmem);
3396 realpart_x = gen_realpart (submode, x);
3397 realpart_y = gen_realpart (submode, y);
3398 imagpart_x = gen_imagpart (submode, x);
3399 imagpart_y = gen_imagpart (submode, y);
3401 /* Show the output dies here. This is necessary for SUBREGs
3402 of pseudos since we cannot track their lifetimes correctly;
3403 hard regs shouldn't appear here except as return values.
3404 We never want to emit such a clobber after reload. */
3405 if (x != y
3406 && ! (reload_in_progress || reload_completed)
3407 && (GET_CODE (realpart_x) == SUBREG
3408 || GET_CODE (imagpart_x) == SUBREG))
3409 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3411 emit_move_insn (realpart_x, realpart_y);
3412 emit_move_insn (imagpart_x, imagpart_y);
3415 return get_last_insn ();
3418 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3419 find a mode to do it in. If we have a movcc, use it. Otherwise,
3420 find the MODE_INT mode of the same width. */
3421 else if (GET_MODE_CLASS (mode) == MODE_CC
3422 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3424 enum insn_code insn_code;
3425 enum machine_mode tmode = VOIDmode;
3426 rtx x1 = x, y1 = y;
3428 if (mode != CCmode
3429 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3430 tmode = CCmode;
3431 else
3432 for (tmode = QImode; tmode != VOIDmode;
3433 tmode = GET_MODE_WIDER_MODE (tmode))
3434 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3435 break;
3437 if (tmode == VOIDmode)
3438 abort ();
3440 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3441 may call change_address which is not appropriate if we were
3442 called when a reload was in progress. We don't have to worry
3443 about changing the address since the size in bytes is supposed to
3444 be the same. Copy the MEM to change the mode and move any
3445 substitutions from the old MEM to the new one. */
3447 if (reload_in_progress)
3449 x = gen_lowpart_common (tmode, x1);
3450 if (x == 0 && GET_CODE (x1) == MEM)
3452 x = adjust_address_nv (x1, tmode, 0);
3453 copy_replacements (x1, x);
3456 y = gen_lowpart_common (tmode, y1);
3457 if (y == 0 && GET_CODE (y1) == MEM)
3459 y = adjust_address_nv (y1, tmode, 0);
3460 copy_replacements (y1, y);
3463 else
3465 x = gen_lowpart (tmode, x);
3466 y = gen_lowpart (tmode, y);
3469 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3470 return emit_insn (GEN_FCN (insn_code) (x, y));
3473 /* This will handle any multi-word or full-word mode that lacks a move_insn
3474 pattern. However, you will get better code if you define such patterns,
3475 even if they must turn into multiple assembler instructions. */
3476 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3478 rtx last_insn = 0;
3479 rtx seq, inner;
3480 int need_clobber;
3481 int i;
3483 #ifdef PUSH_ROUNDING
3485 /* If X is a push on the stack, do the push now and replace
3486 X with a reference to the stack pointer. */
3487 if (push_operand (x, GET_MODE (x)))
3489 rtx temp;
3490 enum rtx_code code;
3492 /* Do not use anti_adjust_stack, since we don't want to update
3493 stack_pointer_delta. */
3494 temp = expand_binop (Pmode,
3495 #ifdef STACK_GROWS_DOWNWARD
3496 sub_optab,
3497 #else
3498 add_optab,
3499 #endif
3500 stack_pointer_rtx,
3501 GEN_INT
3502 (PUSH_ROUNDING
3503 (GET_MODE_SIZE (GET_MODE (x)))),
3504 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3506 if (temp != stack_pointer_rtx)
3507 emit_move_insn (stack_pointer_rtx, temp);
3509 code = GET_CODE (XEXP (x, 0));
3511 /* Just hope that small offsets off SP are OK. */
3512 if (code == POST_INC)
3513 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3514 GEN_INT (-((HOST_WIDE_INT)
3515 GET_MODE_SIZE (GET_MODE (x)))));
3516 else if (code == POST_DEC)
3517 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3518 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3519 else
3520 temp = stack_pointer_rtx;
3522 x = change_address (x, VOIDmode, temp);
3524 #endif
3526 /* If we are in reload, see if either operand is a MEM whose address
3527 is scheduled for replacement. */
3528 if (reload_in_progress && GET_CODE (x) == MEM
3529 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3530 x = replace_equiv_address_nv (x, inner);
3531 if (reload_in_progress && GET_CODE (y) == MEM
3532 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3533 y = replace_equiv_address_nv (y, inner);
3535 start_sequence ();
3537 need_clobber = 0;
3538 for (i = 0;
3539 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3540 i++)
3542 rtx xpart = operand_subword (x, i, 1, mode);
3543 rtx ypart = operand_subword (y, i, 1, mode);
3545 /* If we can't get a part of Y, put Y into memory if it is a
3546 constant. Otherwise, force it into a register. If we still
3547 can't get a part of Y, abort. */
3548 if (ypart == 0 && CONSTANT_P (y))
3550 y = force_const_mem (mode, y);
3551 ypart = operand_subword (y, i, 1, mode);
3553 else if (ypart == 0)
3554 ypart = operand_subword_force (y, i, mode);
3556 if (xpart == 0 || ypart == 0)
3557 abort ();
3559 need_clobber |= (GET_CODE (xpart) == SUBREG);
3561 last_insn = emit_move_insn (xpart, ypart);
3564 seq = get_insns ();
3565 end_sequence ();
3567 /* Show the output dies here. This is necessary for SUBREGs
3568 of pseudos since we cannot track their lifetimes correctly;
3569 hard regs shouldn't appear here except as return values.
3570 We never want to emit such a clobber after reload. */
3571 if (x != y
3572 && ! (reload_in_progress || reload_completed)
3573 && need_clobber != 0)
3574 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3576 emit_insn (seq);
3578 return last_insn;
3580 else
3581 abort ();
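/* Editorial sketch, not part of the original expr.c.  The multi-word
   fallback above, worked for a hypothetical DImode move on a 32-bit
   target with no movdi pattern: the loop runs 8 / 4 == 2 times and emits
   one SImode move per word, roughly

       word 0 of X = word 0 of Y;
       word 1 of X = word 1 of Y;

   preceded by a CLOBBER of X when a half is a SUBREG of a pseudo,
   matching the lifetime note above.  */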
3584 /* If Y is representable exactly in a narrower mode, and the target can
3585 perform the extension directly from constant or memory, then emit the
3586 move as an extension. */
3588 static rtx
3589 compress_float_constant (rtx x, rtx y)
3591 enum machine_mode dstmode = GET_MODE (x);
3592 enum machine_mode orig_srcmode = GET_MODE (y);
3593 enum machine_mode srcmode;
3594 REAL_VALUE_TYPE r;
3596 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3598 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3599 srcmode != orig_srcmode;
3600 srcmode = GET_MODE_WIDER_MODE (srcmode))
3602 enum insn_code ic;
3603 rtx trunc_y, last_insn;
3605 /* Skip if the target can't extend this way. */
3606 ic = can_extend_p (dstmode, srcmode, 0);
3607 if (ic == CODE_FOR_nothing)
3608 continue;
3610 /* Skip if the narrowed value isn't exact. */
3611 if (! exact_real_truncate (srcmode, &r))
3612 continue;
3614 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3616 if (LEGITIMATE_CONSTANT_P (trunc_y))
3618 /* Skip if the target needs extra instructions to perform
3619 the extension. */
3620 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3621 continue;
3623 else if (float_extend_from_mem[dstmode][srcmode])
3624 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3625 else
3626 continue;
3628 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3629 last_insn = get_last_insn ();
3631 if (GET_CODE (x) == REG)
3632 set_unique_reg_note (last_insn, REG_EQUAL, y);
3634 return last_insn;
3637 return NULL_RTX;
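/* Editorial sketch, not part of the original expr.c.  In source-level
   terms the exactness test above roughly asks whether the constant
   survives a round trip through the narrower mode; 1.5 does, 0.1 does
   not.  The function name is invented for illustration.  */
static int
narrows_exactly_example (double d)
{
  return (double) (float) d == d;   /* 1.5 -> 1, 0.1 -> 0 */
}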
3640 /* Pushing data onto the stack. */
3642 /* Push a block of length SIZE (perhaps variable)
3643 and return an rtx to address the beginning of the block.
3644 Note that it is not possible for the value returned to be a QUEUED.
3645 The value may be virtual_outgoing_args_rtx.
3647 EXTRA is the number of bytes of padding to push in addition to SIZE.
3648 BELOW nonzero means this padding comes at low addresses;
3649 otherwise, the padding comes at high addresses. */
3651 rtx
3652 push_block (rtx size, int extra, int below)
3654 rtx temp;
3656 size = convert_modes (Pmode, ptr_mode, size, 1);
3657 if (CONSTANT_P (size))
3658 anti_adjust_stack (plus_constant (size, extra));
3659 else if (GET_CODE (size) == REG && extra == 0)
3660 anti_adjust_stack (size);
3661 else
3663 temp = copy_to_mode_reg (Pmode, size);
3664 if (extra != 0)
3665 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3666 temp, 0, OPTAB_LIB_WIDEN);
3667 anti_adjust_stack (temp);
3670 #ifndef STACK_GROWS_DOWNWARD
3671 if (0)
3672 #else
3673 if (1)
3674 #endif
3676 temp = virtual_outgoing_args_rtx;
3677 if (extra != 0 && below)
3678 temp = plus_constant (temp, extra);
3680 else
3682 if (GET_CODE (size) == CONST_INT)
3683 temp = plus_constant (virtual_outgoing_args_rtx,
3684 -INTVAL (size) - (below ? 0 : extra));
3685 else if (extra != 0 && !below)
3686 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3687 negate_rtx (Pmode, plus_constant (size, extra)));
3688 else
3689 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3690 negate_rtx (Pmode, size));
3693 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3696 #ifdef PUSH_ROUNDING
3698 /* Emit single push insn. */
3700 static void
3701 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3703 rtx dest_addr;
3704 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3705 rtx dest;
3706 enum insn_code icode;
3707 insn_operand_predicate_fn pred;
3709 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3710 /* If there is a push pattern, use it. Otherwise fall back to the old way
3711 of handing a MEM that represents the push operation to the move expander. */
3712 icode = push_optab->handlers[(int) mode].insn_code;
3713 if (icode != CODE_FOR_nothing)
3715 if (((pred = insn_data[(int) icode].operand[0].predicate)
3716 && !((*pred) (x, mode))))
3717 x = force_reg (mode, x);
3718 emit_insn (GEN_FCN (icode) (x));
3719 return;
3721 if (GET_MODE_SIZE (mode) == rounded_size)
3722 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3723 /* If we are to pad downward, adjust the stack pointer first and
3724 then store X into the stack location using an offset. This is
3725 because emit_move_insn does not know how to pad; it does not have
3726 access to type. */
3727 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3729 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3730 HOST_WIDE_INT offset;
3732 emit_move_insn (stack_pointer_rtx,
3733 expand_binop (Pmode,
3734 #ifdef STACK_GROWS_DOWNWARD
3735 sub_optab,
3736 #else
3737 add_optab,
3738 #endif
3739 stack_pointer_rtx,
3740 GEN_INT (rounded_size),
3741 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3743 offset = (HOST_WIDE_INT) padding_size;
3744 #ifdef STACK_GROWS_DOWNWARD
3745 if (STACK_PUSH_CODE == POST_DEC)
3746 /* We have already decremented the stack pointer, so get the
3747 previous value. */
3748 offset += (HOST_WIDE_INT) rounded_size;
3749 #else
3750 if (STACK_PUSH_CODE == POST_INC)
3751 /* We have already incremented the stack pointer, so get the
3752 previous value. */
3753 offset -= (HOST_WIDE_INT) rounded_size;
3754 #endif
3755 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3757 else
3759 #ifdef STACK_GROWS_DOWNWARD
3760 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3761 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3762 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3763 #else
3764 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3765 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3766 GEN_INT (rounded_size));
3767 #endif
3768 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3771 dest = gen_rtx_MEM (mode, dest_addr);
3773 if (type != 0)
3775 set_mem_attributes (dest, type, 1);
3777 if (flag_optimize_sibling_calls)
3778 /* Function incoming arguments may overlap with sibling call
3779 outgoing arguments and we cannot allow reordering of reads
3780 from function arguments with stores to outgoing arguments
3781 of sibling calls. */
3782 set_mem_alias_set (dest, 0);
3784 emit_move_insn (dest, x);
3786 #endif
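/* Editorial sketch, not part of the original expr.c.  The downward-padding
   branch above, worked for a hypothetical 5-byte argument that
   PUSH_ROUNDING rounds up to 8 on a downward-growing stack with
   STACK_PUSH_CODE == PRE_DEC:

     padding_size = 8 - 5 = 3;
     sp -= 8;                       (the explicit adjustment)
     store X at sp + 3              (offset == padding_size)

   so the value occupies the 5 high-addressed bytes of the slot and the
   3 pad bytes sit below it, as FUNCTION_ARG_PADDING == downward asks.  */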
3788 /* Generate code to push X onto the stack, assuming it has mode MODE and
3789 type TYPE.
3790 MODE is redundant except when X is a CONST_INT (since they don't
3791 carry mode info).
3792 SIZE is an rtx for the size of data to be copied (in bytes),
3793 needed only if X is BLKmode.
3795 ALIGN (in bits) is maximum alignment we can assume.
3797 If PARTIAL and REG are both nonzero, then copy that many of the first
3798 words of X into registers starting with REG, and push the rest of X.
3799 The amount of space pushed is decreased by PARTIAL words,
3800 rounded *down* to a multiple of PARM_BOUNDARY.
3801 REG must be a hard register in this case.
3802 If REG is zero but PARTIAL is not, take all other actions for an
3803 argument partially in registers, but do not actually load any
3804 registers.
3806 EXTRA is the amount in bytes of extra space to leave next to this arg.
3807 This is ignored if an argument block has already been allocated.
3809 On a machine that lacks real push insns, ARGS_ADDR is the address of
3810 the bottom of the argument block for this call. We use indexing off there
3811 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3812 argument block has not been preallocated.
3814 ARGS_SO_FAR is the size of args previously pushed for this call.
3816 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3817 for arguments passed in registers. If nonzero, it will be the number
3818 of bytes required. */
3820 void
3821 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3822 unsigned int align, int partial, rtx reg, int extra,
3823 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3824 rtx alignment_pad)
3826 rtx xinner;
3827 enum direction stack_direction
3828 #ifdef STACK_GROWS_DOWNWARD
3829 = downward;
3830 #else
3831 = upward;
3832 #endif
3834 /* Decide where to pad the argument: `downward' for below,
3835 `upward' for above, or `none' for don't pad it.
3836 Default is below for small data on big-endian machines; else above. */
3837 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3839 /* Invert direction if stack is post-decrement.
3840 FIXME: why? */
3841 if (STACK_PUSH_CODE == POST_DEC)
3842 if (where_pad != none)
3843 where_pad = (where_pad == downward ? upward : downward);
3845 xinner = x = protect_from_queue (x, 0);
3847 if (mode == BLKmode)
3849 /* Copy a block into the stack, entirely or partially. */
3851 rtx temp;
3852 int used = partial * UNITS_PER_WORD;
3853 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3854 int skip;
3856 if (size == 0)
3857 abort ();
3859 used -= offset;
3861 /* USED is now the # of bytes we need not copy to the stack
3862 because registers will take care of them. */
3864 if (partial != 0)
3865 xinner = adjust_address (xinner, BLKmode, used);
3867 /* If the partial register-part of the arg counts in its stack size,
3868 skip the part of stack space corresponding to the registers.
3869 Otherwise, start copying to the beginning of the stack space,
3870 by setting SKIP to 0. */
3871 skip = (reg_parm_stack_space == 0) ? 0 : used;
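/* Editorial sketch, not part of the original expr.c.  The arithmetic
   above, worked for a hypothetical target with UNITS_PER_WORD == 4 and
   PARM_BOUNDARY == 64 when PARTIAL == 3 words go in registers:

     used   = 3 * 4 = 12;
     offset = 12 % 8 = 4;  used -= offset  =>  used == 8;

   so the first 8 bytes are left to the registers, the stack copy starts
   at byte 8 (a parameter boundary) and re-copies the 4 register bytes
   that straddle it, and SKIP is 8 when the register part also counts
   against the argument's stack space, 0 otherwise.  */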
3873 #ifdef PUSH_ROUNDING
3874 /* Do it with several push insns if that doesn't take lots of insns
3875 and if there is no difficulty with push insns that skip bytes
3876 on the stack for alignment purposes. */
3877 if (args_addr == 0
3878 && PUSH_ARGS
3879 && GET_CODE (size) == CONST_INT
3880 && skip == 0
3881 && MEM_ALIGN (xinner) >= align
3882 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3883 /* Here we avoid the case of a structure whose weak alignment
3884 would force many pushes of small amounts of data, since
3885 such small pushes do rounding that causes trouble. */
3886 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3887 || align >= BIGGEST_ALIGNMENT
3888 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3889 == (align / BITS_PER_UNIT)))
3890 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3892 /* Push padding now if padding above and stack grows down,
3893 or if padding below and stack grows up.
3894 But if space already allocated, this has already been done. */
3895 if (extra && args_addr == 0
3896 && where_pad != none && where_pad != stack_direction)
3897 anti_adjust_stack (GEN_INT (extra));
3899 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3901 else
3902 #endif /* PUSH_ROUNDING */
3904 rtx target;
3906 /* Otherwise make space on the stack and copy the data
3907 to the address of that space. */
3909 /* Deduct words put into registers from the size we must copy. */
3910 if (partial != 0)
3912 if (GET_CODE (size) == CONST_INT)
3913 size = GEN_INT (INTVAL (size) - used);
3914 else
3915 size = expand_binop (GET_MODE (size), sub_optab, size,
3916 GEN_INT (used), NULL_RTX, 0,
3917 OPTAB_LIB_WIDEN);
3920 /* Get the address of the stack space.
3921 In this case, we do not deal with EXTRA separately.
3922 A single stack adjust will do. */
3923 if (! args_addr)
3925 temp = push_block (size, extra, where_pad == downward);
3926 extra = 0;
3928 else if (GET_CODE (args_so_far) == CONST_INT)
3929 temp = memory_address (BLKmode,
3930 plus_constant (args_addr,
3931 skip + INTVAL (args_so_far)));
3932 else
3933 temp = memory_address (BLKmode,
3934 plus_constant (gen_rtx_PLUS (Pmode,
3935 args_addr,
3936 args_so_far),
3937 skip));
3939 if (!ACCUMULATE_OUTGOING_ARGS)
3941 /* If the source is referenced relative to the stack pointer,
3942 copy it to another register to stabilize it. We do not need
3943 to do this if we know that we won't be changing sp. */
3945 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3946 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3947 temp = copy_to_reg (temp);
3950 target = gen_rtx_MEM (BLKmode, temp);
3952 if (type != 0)
3954 set_mem_attributes (target, type, 1);
3955 /* Function incoming arguments may overlap with sibling call
3956 outgoing arguments and we cannot allow reordering of reads
3957 from function arguments with stores to outgoing arguments
3958 of sibling calls. */
3959 set_mem_alias_set (target, 0);
3962 /* ALIGN may well be better aligned than TYPE, e.g. due to
3963 PARM_BOUNDARY. Assume the caller isn't lying. */
3964 set_mem_align (target, align);
3966 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3969 else if (partial > 0)
3971 /* Scalar partly in registers. */
3973 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3974 int i;
3975 int not_stack;
3976 /* # words of start of argument
3977 that we must make space for but need not store. */
3978 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3979 int args_offset = INTVAL (args_so_far);
3980 int skip;
3982 /* Push padding now if padding above and stack grows down,
3983 or if padding below and stack grows up.
3984 But if space already allocated, this has already been done. */
3985 if (extra && args_addr == 0
3986 && where_pad != none && where_pad != stack_direction)
3987 anti_adjust_stack (GEN_INT (extra));
3989 /* If we make space by pushing it, we might as well push
3990 the real data. Otherwise, we can leave OFFSET nonzero
3991 and leave the space uninitialized. */
3992 if (args_addr == 0)
3993 offset = 0;
3995 /* Now NOT_STACK gets the number of words that we don't need to
3996 allocate on the stack. */
3997 not_stack = partial - offset;
3999 /* If the partial register-part of the arg counts in its stack size,
4000 skip the part of stack space corresponding to the registers.
4001 Otherwise, start copying to the beginning of the stack space,
4002 by setting SKIP to 0. */
4003 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4005 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4006 x = validize_mem (force_const_mem (mode, x));
4008 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4009 SUBREGs of such registers are not allowed. */
4010 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4011 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4012 x = copy_to_reg (x);
4014 /* Loop over all the words allocated on the stack for this arg. */
4015 /* We can do it by words, because any scalar bigger than a word
4016 has a size a multiple of a word. */
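      /* Each stack-resident word of X (indexes NOT_STACK .. SIZE-1, in
	 whichever order PUSH_ARGS_REVERSED dictates) is either pushed by a
	 recursive word_mode call below or, when OFFSET is nonzero, left
	 uninitialized.  The first NOT_STACK words belong to the register
	 part, which is loaded into REG at the end of this function.  */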
4017 #ifndef PUSH_ARGS_REVERSED
4018 for (i = not_stack; i < size; i++)
4019 #else
4020 for (i = size - 1; i >= not_stack; i--)
4021 #endif
4022 if (i >= not_stack + offset)
4023 emit_push_insn (operand_subword_force (x, i, mode),
4024 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4025 0, args_addr,
4026 GEN_INT (args_offset + ((i - not_stack + skip)
4027 * UNITS_PER_WORD)),
4028 reg_parm_stack_space, alignment_pad);
4030 else
4032 rtx addr;
4033 rtx dest;
4035 /* Push padding now if padding above and stack grows down,
4036 or if padding below and stack grows up.
4037 But if space already allocated, this has already been done. */
4038 if (extra && args_addr == 0
4039 && where_pad != none && where_pad != stack_direction)
4040 anti_adjust_stack (GEN_INT (extra));
4042 #ifdef PUSH_ROUNDING
4043 if (args_addr == 0 && PUSH_ARGS)
4044 emit_single_push_insn (mode, x, type);
4045 else
4046 #endif
4048 if (GET_CODE (args_so_far) == CONST_INT)
4049 addr
4050 = memory_address (mode,
4051 plus_constant (args_addr,
4052 INTVAL (args_so_far)));
4053 else
4054 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4055 args_so_far));
4056 dest = gen_rtx_MEM (mode, addr);
4057 if (type != 0)
4059 set_mem_attributes (dest, type, 1);
4060 /* Function incoming arguments may overlap with sibling call
4061 outgoing arguments and we cannot allow reordering of reads
4062 from function arguments with stores to outgoing arguments
4063 of sibling calls. */
4064 set_mem_alias_set (dest, 0);
4067 emit_move_insn (dest, x);
4071 /* If part should go in registers, copy that part
4072 into the appropriate registers. Do this now, at the end,
4073 since mem-to-mem copies above may do function calls. */
4074 if (partial > 0 && reg != 0)
4076 /* Handle calls that pass values in multiple non-contiguous locations.
4077 The Irix 6 ABI has examples of this. */
4078 if (GET_CODE (reg) == PARALLEL)
4079 emit_group_load (reg, x, -1); /* ??? size? */
4080 else
4081 move_block_to_reg (REGNO (reg), x, partial, mode);
4084 if (extra && args_addr == 0 && where_pad == stack_direction)
4085 anti_adjust_stack (GEN_INT (extra));
4087 if (alignment_pad && args_addr == 0)
4088 anti_adjust_stack (alignment_pad);
4091 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4092 operations. */
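/* Roughly speaking: when expanding e.g. A = B + C, the pseudo that will
   eventually hold A can double as the place where intermediate results are
   computed, but only if it passes the tests below; otherwise 0 is returned
   and no such reuse is attempted.  */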
4094 static rtx
4095 get_subtarget (rtx x)
4097 return ((x == 0
4098 /* Only registers can be subtargets. */
4099 || GET_CODE (x) != REG
4100 /* If the register is readonly, it can't be set more than once. */
4101 || RTX_UNCHANGING_P (x)
4102 /* Don't use hard regs to avoid extending their life. */
4103 || REGNO (x) < FIRST_PSEUDO_REGISTER
4104 /* Avoid subtargets inside loops,
4105 since they hide some invariant expressions. */
4106 || preserve_subexpressions_p ())
4107 ? 0 : x);
4110 /* Expand an assignment that stores the value of FROM into TO.
4111 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4112 (This may contain a QUEUED rtx;
4113 if the value is constant, this rtx is a constant.)
4114 Otherwise, the returned value is NULL_RTX.
4116 SUGGEST_REG is no longer actually used.
4117 It used to mean, copy the value through a register
4118 and return that register, if that is possible.
4119 We now use WANT_VALUE to decide whether to do this. */
4121 rtx
4122 expand_assignment (tree to, tree from, int want_value,
4123 int suggest_reg ATTRIBUTE_UNUSED)
4125 rtx to_rtx = 0;
4126 rtx result;
4128 /* Don't crash if the lhs of the assignment was erroneous. */
4130 if (TREE_CODE (to) == ERROR_MARK)
4132 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4133 return want_value ? result : NULL_RTX;
4136 /* Assignment of a structure component needs special treatment
4137 if the structure component's rtx is not simply a MEM.
4138 Assignment of an array element at a constant index, and assignment of
4139 an array element in an unaligned packed structure field, has the same
4140 problem. */
4142 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4143 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4144 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4146 enum machine_mode mode1;
4147 HOST_WIDE_INT bitsize, bitpos;
4148 rtx orig_to_rtx;
4149 tree offset;
4150 int unsignedp;
4151 int volatilep = 0;
4152 tree tem;
4154 push_temp_slots ();
4155 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4156 &unsignedp, &volatilep);
4158 /* If we are going to use store_bit_field and extract_bit_field,
4159 make sure to_rtx will be safe for multiple use. */
4161 if (mode1 == VOIDmode && want_value)
4162 tem = stabilize_reference (tem);
4164 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4166 if (offset != 0)
4168 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4170 if (GET_CODE (to_rtx) != MEM)
4171 abort ();
4173 #ifdef POINTERS_EXTEND_UNSIGNED
4174 if (GET_MODE (offset_rtx) != Pmode)
4175 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4176 #else
4177 if (GET_MODE (offset_rtx) != ptr_mode)
4178 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4179 #endif
4181 /* A constant address in TO_RTX can have VOIDmode, we must not try
4182 to call force_reg for that case. Avoid that case. */
4183 if (GET_CODE (to_rtx) == MEM
4184 && GET_MODE (to_rtx) == BLKmode
4185 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4186 && bitsize > 0
4187 && (bitpos % bitsize) == 0
4188 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4189 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4191 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4192 bitpos = 0;
4195 to_rtx = offset_address (to_rtx, offset_rtx,
4196 highest_pow2_factor_for_type (TREE_TYPE (to),
4197 offset));
4200 if (GET_CODE (to_rtx) == MEM)
4202 /* If the field is at offset zero, we could have been given the
4203 DECL_RTX of the parent struct. Don't munge it. */
4204 to_rtx = shallow_copy_rtx (to_rtx);
4206 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4209 /* Deal with volatile and readonly fields. The former is only done
4210 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4211 if (volatilep && GET_CODE (to_rtx) == MEM)
4213 if (to_rtx == orig_to_rtx)
4214 to_rtx = copy_rtx (to_rtx);
4215 MEM_VOLATILE_P (to_rtx) = 1;
4218 if (TREE_CODE (to) == COMPONENT_REF
4219 && TREE_READONLY (TREE_OPERAND (to, 1)))
4221 if (to_rtx == orig_to_rtx)
4222 to_rtx = copy_rtx (to_rtx);
4223 RTX_UNCHANGING_P (to_rtx) = 1;
4226 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4228 if (to_rtx == orig_to_rtx)
4229 to_rtx = copy_rtx (to_rtx);
4230 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4233 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4234 (want_value
4235 /* Spurious cast for HPUX compiler. */
4236 ? ((enum machine_mode)
4237 TYPE_MODE (TREE_TYPE (to)))
4238 : VOIDmode),
4239 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4241 preserve_temp_slots (result);
4242 free_temp_slots ();
4243 pop_temp_slots ();
4245 /* If the value is meaningful, convert RESULT to the proper mode.
4246 Otherwise, return nothing. */
4247 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4248 TYPE_MODE (TREE_TYPE (from)),
4249 result,
4250 TREE_UNSIGNED (TREE_TYPE (to)))
4251 : NULL_RTX);
4254 /* If the rhs is a function call and its value is not an aggregate,
4255 call the function before we start to compute the lhs.
4256 This is needed for correct code for cases such as
4257 val = setjmp (buf) on machines where reference to val
4258 requires loading up part of an address in a separate insn.
4260 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4261 since it might be a promoted variable where the zero- or sign- extension
4262 needs to be done. Handling this in the normal way is safe because no
4263 computation is done before the call. */
4264 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4265 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4266 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4267 && GET_CODE (DECL_RTL (to)) == REG))
4269 rtx value;
4271 push_temp_slots ();
4272 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4273 if (to_rtx == 0)
4274 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4276 /* Handle calls that return values in multiple non-contiguous locations.
4277 The Irix 6 ABI has examples of this. */
4278 if (GET_CODE (to_rtx) == PARALLEL)
4279 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4280 else if (GET_MODE (to_rtx) == BLKmode)
4281 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4282 else
4284 #ifdef POINTERS_EXTEND_UNSIGNED
4285 if (POINTER_TYPE_P (TREE_TYPE (to))
4286 && GET_MODE (to_rtx) != GET_MODE (value))
4287 value = convert_memory_address (GET_MODE (to_rtx), value);
4288 #endif
4289 emit_move_insn (to_rtx, value);
4291 preserve_temp_slots (to_rtx);
4292 free_temp_slots ();
4293 pop_temp_slots ();
4294 return want_value ? to_rtx : NULL_RTX;
4297 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4298 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4300 if (to_rtx == 0)
4301 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4303 /* Don't move directly into a return register. */
4304 if (TREE_CODE (to) == RESULT_DECL
4305 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4307 rtx temp;
4309 push_temp_slots ();
4310 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4312 if (GET_CODE (to_rtx) == PARALLEL)
4313 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4314 else
4315 emit_move_insn (to_rtx, temp);
4317 preserve_temp_slots (to_rtx);
4318 free_temp_slots ();
4319 pop_temp_slots ();
4320 return want_value ? to_rtx : NULL_RTX;
4323 /* In case we are returning the contents of an object which overlaps
4324 the place the value is being stored, use a safe function when copying
4325 a value through a pointer into a structure value return block. */
4326 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4327 && current_function_returns_struct
4328 && !current_function_returns_pcc_struct)
4330 rtx from_rtx, size;
4332 push_temp_slots ();
4333 size = expr_size (from);
4334 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4336 if (TARGET_MEM_FUNCTIONS)
4337 emit_library_call (memmove_libfunc, LCT_NORMAL,
4338 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4339 XEXP (from_rtx, 0), Pmode,
4340 convert_to_mode (TYPE_MODE (sizetype),
4341 size, TREE_UNSIGNED (sizetype)),
4342 TYPE_MODE (sizetype));
4343 else
4344 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4345 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4346 XEXP (to_rtx, 0), Pmode,
4347 convert_to_mode (TYPE_MODE (integer_type_node),
4348 size,
4349 TREE_UNSIGNED (integer_type_node)),
4350 TYPE_MODE (integer_type_node));
4352 preserve_temp_slots (to_rtx);
4353 free_temp_slots ();
4354 pop_temp_slots ();
4355 return want_value ? to_rtx : NULL_RTX;
4358 /* Compute FROM and store the value in the rtx we got. */
4360 push_temp_slots ();
4361 result = store_expr (from, to_rtx, want_value);
4362 preserve_temp_slots (result);
4363 free_temp_slots ();
4364 pop_temp_slots ();
4365 return want_value ? result : NULL_RTX;
4368 /* Generate code for computing expression EXP,
4369 and storing the value into TARGET.
4370 TARGET may contain a QUEUED rtx.
4372 If WANT_VALUE & 1 is nonzero, return a copy of the value
4373 not in TARGET, so that we can be sure to use the proper
4374 value in a containing expression even if TARGET has something
4375 else stored in it. If possible, we copy the value through a pseudo
4376 and return that pseudo. Or, if the value is constant, we try to
4377 return the constant. In some cases, we return a pseudo
4378 copied *from* TARGET.
4380 If the mode is BLKmode then we may return TARGET itself.
4381 It turns out that in BLKmode it doesn't cause a problem,
4382 because C has no operators that could combine two different
4383 assignments into the same BLKmode object with different values
4384 with no sequence point. Will other languages need this to
4385 be more thorough?
4387 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4388 to catch quickly any cases where the caller uses the value
4389 and fails to set WANT_VALUE.
4391 If WANT_VALUE & 2 is set, this is a store into a call param on the
4392 stack, and block moves may need to be treated specially. */
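/* In other words, WANT_VALUE acts as a bit mask: bit 0 asks for the stored
   value to be returned (possibly through a pseudo), and bit 1 marks a store
   into an outgoing call argument on the stack, which makes the expansions
   below use EXPAND_STACK_PARM and block copies use BLOCK_OP_CALL_PARM.  */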
4394 rtx
4395 store_expr (tree exp, rtx target, int want_value)
4397 rtx temp;
4398 int dont_return_target = 0;
4399 int dont_store_target = 0;
4401 if (VOID_TYPE_P (TREE_TYPE (exp)))
4403 /* C++ can generate ?: expressions with a throw expression in one
4404 branch and an rvalue in the other. Here, we resolve attempts to
4405 store the throw expression's nonexistent result. */
4406 if (want_value)
4407 abort ();
4408 expand_expr (exp, const0_rtx, VOIDmode, 0);
4409 return NULL_RTX;
4411 if (TREE_CODE (exp) == COMPOUND_EXPR)
4413 /* Perform first part of compound expression, then assign from second
4414 part. */
4415 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4416 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4417 emit_queue ();
4418 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4420 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4422 /* For conditional expression, get safe form of the target. Then
4423 test the condition, doing the appropriate assignment on either
4424 side. This avoids the creation of unnecessary temporaries.
4425 For non-BLKmode, it is more efficient not to do this. */
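      /* For instance, for a structure assignment s = cond ? a : b, each
	 branch below stores its operand directly into S instead of first
	 building the chosen value in a temporary and then copying that
	 temporary into S.  */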
4427 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4429 emit_queue ();
4430 target = protect_from_queue (target, 1);
4432 do_pending_stack_adjust ();
4433 NO_DEFER_POP;
4434 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4435 start_cleanup_deferral ();
4436 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4437 end_cleanup_deferral ();
4438 emit_queue ();
4439 emit_jump_insn (gen_jump (lab2));
4440 emit_barrier ();
4441 emit_label (lab1);
4442 start_cleanup_deferral ();
4443 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4444 end_cleanup_deferral ();
4445 emit_queue ();
4446 emit_label (lab2);
4447 OK_DEFER_POP;
4449 return want_value & 1 ? target : NULL_RTX;
4451 else if (queued_subexp_p (target))
4452 /* If target contains a postincrement, let's not risk
4453 using it as the place to generate the rhs. */
4455 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4457 /* Expand EXP into a new pseudo. */
4458 temp = gen_reg_rtx (GET_MODE (target));
4459 temp = expand_expr (exp, temp, GET_MODE (target),
4460 (want_value & 2
4461 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4463 else
4464 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4465 (want_value & 2
4466 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4468 /* If target is volatile, ANSI requires accessing the value
4469 *from* the target, if it is accessed. So make that happen.
4470 In no case return the target itself. */
4471 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4472 dont_return_target = 1;
4474 else if ((want_value & 1) != 0
4475 && GET_CODE (target) == MEM
4476 && ! MEM_VOLATILE_P (target)
4477 && GET_MODE (target) != BLKmode)
4478 /* If target is in memory and caller wants value in a register instead,
4479 arrange that. Pass TARGET as target for expand_expr so that,
4480 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4481 We know expand_expr will not use the target in that case.
4482 Don't do this if TARGET is volatile because we are supposed
4483 to write it and then read it. */
4485 temp = expand_expr (exp, target, GET_MODE (target),
4486 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4487 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4489 /* If TEMP is already in the desired TARGET, only copy it from
4490 memory and don't store it there again. */
4491 if (temp == target
4492 || (rtx_equal_p (temp, target)
4493 && ! side_effects_p (temp) && ! side_effects_p (target)))
4494 dont_store_target = 1;
4495 temp = copy_to_reg (temp);
4497 dont_return_target = 1;
4499 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4500 /* If this is a scalar in a register that is stored in a wider mode
4501 than the declared mode, compute the result into its declared mode
4502 and then convert to the wider mode. Our value is the computed
4503 expression. */
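      /* A typical instance: on a 64-bit target that keeps a 32-bit `int'
	 variable extended in a 64-bit register, TARGET is
	 (subreg:SI (reg:DI N) 0) with SUBREG_PROMOTED_VAR_P set; the value
	 is computed in SImode and convert_move then extends it into the
	 full DImode register.  (Illustrative modes only.)  */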
4505 rtx inner_target = 0;
4507 /* If we don't want a value, we can do the conversion inside EXP,
4508 which will often result in some optimizations. Do the conversion
4509 in two steps: first change the signedness, if needed, then
4510 the extend. But don't do this if the type of EXP is a subtype
4511 of something else since then the conversion might involve
4512 more than just converting modes. */
4513 if ((want_value & 1) == 0
4514 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4515 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4517 if (TREE_UNSIGNED (TREE_TYPE (exp))
4518 != SUBREG_PROMOTED_UNSIGNED_P (target))
4519 exp = convert
4520 ((*lang_hooks.types.signed_or_unsigned_type)
4521 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4523 exp = convert ((*lang_hooks.types.type_for_mode)
4524 (GET_MODE (SUBREG_REG (target)),
4525 SUBREG_PROMOTED_UNSIGNED_P (target)),
4526 exp);
4528 inner_target = SUBREG_REG (target);
4531 temp = expand_expr (exp, inner_target, VOIDmode,
4532 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4534 /* If TEMP is a MEM and we want a result value, make the access
4535 now so it gets done only once. Strictly speaking, this is
4536 only necessary if the MEM is volatile, or if the address
4537 overlaps TARGET. But not performing the load twice also
4538 reduces the amount of rtl we generate and then have to CSE. */
4539 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4540 temp = copy_to_reg (temp);
4542 /* If TEMP is a VOIDmode constant, use convert_modes to make
4543 sure that we properly convert it. */
4544 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4546 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4547 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4548 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4549 GET_MODE (target), temp,
4550 SUBREG_PROMOTED_UNSIGNED_P (target));
4553 convert_move (SUBREG_REG (target), temp,
4554 SUBREG_PROMOTED_UNSIGNED_P (target));
4556 /* If we promoted a constant, change the mode back down to match
4557 target. Otherwise, the caller might get confused by a result whose
4558 mode is larger than expected. */
4560 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4562 if (GET_MODE (temp) != VOIDmode)
4564 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4565 SUBREG_PROMOTED_VAR_P (temp) = 1;
4566 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4567 SUBREG_PROMOTED_UNSIGNED_P (target));
4569 else
4570 temp = convert_modes (GET_MODE (target),
4571 GET_MODE (SUBREG_REG (target)),
4572 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4575 return want_value & 1 ? temp : NULL_RTX;
4577 else
4579 temp = expand_expr (exp, target, GET_MODE (target),
4580 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4581 /* Return TARGET if it's a specified hardware register.
4582 If TARGET is a volatile mem ref, either return TARGET
4583 or return a reg copied *from* TARGET; ANSI requires this.
4585 Otherwise, if TEMP is not TARGET, return TEMP
4586 if it is constant (for efficiency),
4587 or if we really want the correct value. */
4588 if (!(target && GET_CODE (target) == REG
4589 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4590 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4591 && ! rtx_equal_p (temp, target)
4592 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4593 dont_return_target = 1;
4596 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4597 the same as that of TARGET, adjust the constant. This is needed, for
4598 example, in case it is a CONST_DOUBLE and we want only a word-sized
4599 value. */
4600 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4601 && TREE_CODE (exp) != ERROR_MARK
4602 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4603 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4604 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4606 /* If value was not generated in the target, store it there.
4607 Convert the value to TARGET's type first if necessary.
4608 If TEMP and TARGET compare equal according to rtx_equal_p, but
4609 one or both of them are volatile memory refs, we have to distinguish
4610 two cases:
4611 - expand_expr has used TARGET. In this case, we must not generate
4612 another copy. This can be detected by TARGET being equal according
4613 to == .
4614 - expand_expr has not used TARGET - that means that the source just
4615 happens to have the same RTX form. Since temp will have been created
4616 by expand_expr, it will compare unequal according to == .
4617 We must generate a copy in this case, to reach the correct number
4618 of volatile memory references. */
4620 if ((! rtx_equal_p (temp, target)
4621 || (temp != target && (side_effects_p (temp)
4622 || side_effects_p (target))))
4623 && TREE_CODE (exp) != ERROR_MARK
4624 && ! dont_store_target
4625 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4626 but TARGET is not valid memory reference, TEMP will differ
4627 from TARGET although it is really the same location. */
4628 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4629 || target != DECL_RTL_IF_SET (exp))
4630 /* If there's nothing to copy, don't bother. Don't call expr_size
4631 unless necessary, because some front ends' (e.g. C++) expr_size hook
4632 aborts on objects that are not supposed to be bit-copied or
4633 bit-initialized. */
4634 && expr_size (exp) != const0_rtx)
4636 target = protect_from_queue (target, 1);
4637 if (GET_MODE (temp) != GET_MODE (target)
4638 && GET_MODE (temp) != VOIDmode)
4640 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4641 if (dont_return_target)
4643 /* In this case, we will return TEMP,
4644 so make sure it has the proper mode.
4645 But don't forget to store the value into TARGET. */
4646 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4647 emit_move_insn (target, temp);
4649 else
4650 convert_move (target, temp, unsignedp);
4653 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4655 /* Handle copying a string constant into an array. The string
4656 constant may be shorter than the array. So copy just the string's
4657 actual length, and clear the rest. First get the size of the data
4658 type of the string, which is actually the size of the target. */
4659 rtx size = expr_size (exp);
4661 if (GET_CODE (size) == CONST_INT
4662 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4663 emit_block_move (target, temp, size,
4664 (want_value & 2
4665 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4666 else
4668 /* Compute the size of the data to copy from the string. */
4669 tree copy_size
4670 = size_binop (MIN_EXPR,
4671 make_tree (sizetype, size),
4672 size_int (TREE_STRING_LENGTH (exp)));
4673 rtx copy_size_rtx
4674 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4675 (want_value & 2
4676 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4677 rtx label = 0;
4679 /* Copy that much. */
4680 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4681 TREE_UNSIGNED (sizetype));
4682 emit_block_move (target, temp, copy_size_rtx,
4683 (want_value & 2
4684 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4686 /* Figure out how much is left in TARGET that we have to clear.
4687 Do all calculations in ptr_mode. */
4688 if (GET_CODE (copy_size_rtx) == CONST_INT)
4690 size = plus_constant (size, -INTVAL (copy_size_rtx));
4691 target = adjust_address (target, BLKmode,
4692 INTVAL (copy_size_rtx));
4694 else
4696 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4697 copy_size_rtx, NULL_RTX, 0,
4698 OPTAB_LIB_WIDEN);
4700 #ifdef POINTERS_EXTEND_UNSIGNED
4701 if (GET_MODE (copy_size_rtx) != Pmode)
4702 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4703 TREE_UNSIGNED (sizetype));
4704 #endif
4706 target = offset_address (target, copy_size_rtx,
4707 highest_pow2_factor (copy_size));
4708 label = gen_label_rtx ();
4709 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4710 GET_MODE (size), 0, label);
4713 if (size != const0_rtx)
4714 clear_storage (target, size);
4716 if (label)
4717 emit_label (label);
4720 /* Handle calls that return values in multiple non-contiguous locations.
4721 The Irix 6 ABI has examples of this. */
4722 else if (GET_CODE (target) == PARALLEL)
4723 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4724 else if (GET_MODE (temp) == BLKmode)
4725 emit_block_move (target, temp, expr_size (exp),
4726 (want_value & 2
4727 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4728 else
4729 emit_move_insn (target, temp);
4732 /* If we don't want a value, return NULL_RTX. */
4733 if ((want_value & 1) == 0)
4734 return NULL_RTX;
4736 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4737 ??? The latter test doesn't seem to make sense. */
4738 else if (dont_return_target && GET_CODE (temp) != MEM)
4739 return temp;
4741 /* Return TARGET itself if it is a hard register. */
4742 else if ((want_value & 1) != 0
4743 && GET_MODE (target) != BLKmode
4744 && ! (GET_CODE (target) == REG
4745 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4746 return copy_to_reg (target);
4748 else
4749 return target;
4752 /* Return 1 if EXP just contains zeros. */
4754 static int
4755 is_zeros_p (tree exp)
4757 tree elt;
4759 switch (TREE_CODE (exp))
4761 case CONVERT_EXPR:
4762 case NOP_EXPR:
4763 case NON_LVALUE_EXPR:
4764 case VIEW_CONVERT_EXPR:
4765 return is_zeros_p (TREE_OPERAND (exp, 0));
4767 case INTEGER_CST:
4768 return integer_zerop (exp);
4770 case COMPLEX_CST:
4771 return
4772 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4774 case REAL_CST:
4775 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4777 case VECTOR_CST:
4778 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4779 elt = TREE_CHAIN (elt))
4780 if (!is_zeros_p (TREE_VALUE (elt)))
4781 return 0;
4783 return 1;
4785 case CONSTRUCTOR:
4786 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4787 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4788 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4789 if (! is_zeros_p (TREE_VALUE (elt)))
4790 return 0;
4792 return 1;
4794 default:
4795 return 0;
4799 /* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
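/* E.g. a CONSTRUCTOR with 6 zero elements out of 8 qualifies
   (4 * 6 >= 3 * 8), whereas 5 out of 8 does not.  */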
4801 static int
4802 mostly_zeros_p (tree exp)
4804 if (TREE_CODE (exp) == CONSTRUCTOR)
4806 int elts = 0, zeros = 0;
4807 tree elt = CONSTRUCTOR_ELTS (exp);
4808 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4810 /* If there are no ranges of true bits, it is all zero. */
4811 return elt == NULL_TREE;
4813 for (; elt; elt = TREE_CHAIN (elt))
4815 /* We do not handle the case where the index is a RANGE_EXPR,
4816 so the statistic will be somewhat inaccurate.
4817 We do make a more accurate count in store_constructor itself,
4818 and since this function is used only for nested array elements,
4819 this should be close enough. */
4820 if (mostly_zeros_p (TREE_VALUE (elt)))
4821 zeros++;
4822 elts++;
4825 return 4 * zeros >= 3 * elts;
4828 return is_zeros_p (exp);
4831 /* Helper function for store_constructor.
4832 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4833 TYPE is the type of the CONSTRUCTOR, not the element type.
4834 CLEARED is as for store_constructor.
4835 ALIAS_SET is the alias set to use for any stores.
4837 This provides a recursive shortcut back to store_constructor when it isn't
4838 necessary to go through store_field. This is so that we can pass through
4839 the cleared field to let store_constructor know that we may not have to
4840 clear a substructure if the outer structure has already been cleared. */
4842 static void
4843 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4844 HOST_WIDE_INT bitpos, enum machine_mode mode,
4845 tree exp, tree type, int cleared, int alias_set)
4847 if (TREE_CODE (exp) == CONSTRUCTOR
4848 && bitpos % BITS_PER_UNIT == 0
4849 /* If we have a nonzero bitpos for a register target, then we just
4850 let store_field do the bitfield handling. This is unlikely to
4851 generate unnecessary clear instructions anyway.  */
4852 && (bitpos == 0 || GET_CODE (target) == MEM))
4854 if (GET_CODE (target) == MEM)
4855 target
4856 = adjust_address (target,
4857 GET_MODE (target) == BLKmode
4858 || 0 != (bitpos
4859 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4860 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4863 /* Update the alias set, if required. */
4864 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4865 && MEM_ALIAS_SET (target) != 0)
4867 target = copy_rtx (target);
4868 set_mem_alias_set (target, alias_set);
4871 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4873 else
4874 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4875 alias_set);
4878 /* Store the value of constructor EXP into the rtx TARGET.
4879 TARGET is either a REG or a MEM; we know it cannot conflict, since
4880 safe_from_p has been called.
4881 CLEARED is true if TARGET is known to have been zero'd.
4882 SIZE is the number of bytes of TARGET we are allowed to modify: this
4883 may not be the same as the size of EXP if we are assigning to a field
4884 which has been packed to exclude padding bits. */
4886 static void
4887 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4889 tree type = TREE_TYPE (exp);
4890 #ifdef WORD_REGISTER_OPERATIONS
4891 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4892 #endif
4894 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4895 || TREE_CODE (type) == QUAL_UNION_TYPE)
4897 tree elt;
4899 /* If size is zero or the target is already cleared, do nothing. */
4900 if (size == 0 || cleared)
4901 cleared = 1;
4902 /* We either clear the aggregate or indicate the value is dead. */
4903 else if ((TREE_CODE (type) == UNION_TYPE
4904 || TREE_CODE (type) == QUAL_UNION_TYPE)
4905 && ! CONSTRUCTOR_ELTS (exp))
4906 /* If the constructor is empty, clear the union. */
4908 clear_storage (target, expr_size (exp));
4909 cleared = 1;
4912 /* If we are building a static constructor into a register,
4913 set the initial value as zero so we can fold the value into
4914 a constant. But if more than one register is involved,
4915 this probably loses. */
4916 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4917 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4919 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4920 cleared = 1;
4923 /* If the constructor has fewer fields than the structure
4924 or if we are initializing the structure to mostly zeros,
4925 clear the whole structure first. Don't do this if TARGET is a
4926 register whose mode size isn't equal to SIZE since clear_storage
4927 can't handle this case. */
4928 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4929 || mostly_zeros_p (exp))
4930 && (GET_CODE (target) != REG
4931 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4932 == size)))
4934 rtx xtarget = target;
4936 if (readonly_fields_p (type))
4938 xtarget = copy_rtx (xtarget);
4939 RTX_UNCHANGING_P (xtarget) = 1;
4942 clear_storage (xtarget, GEN_INT (size));
4943 cleared = 1;
4946 if (! cleared)
4947 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4949 /* Store each element of the constructor into
4950 the corresponding field of TARGET. */
4952 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4954 tree field = TREE_PURPOSE (elt);
4955 tree value = TREE_VALUE (elt);
4956 enum machine_mode mode;
4957 HOST_WIDE_INT bitsize;
4958 HOST_WIDE_INT bitpos = 0;
4959 tree offset;
4960 rtx to_rtx = target;
4962 /* Just ignore missing fields.
4963 We cleared the whole structure, above,
4964 if any fields are missing. */
4965 if (field == 0)
4966 continue;
4968 if (cleared && is_zeros_p (value))
4969 continue;
4971 if (host_integerp (DECL_SIZE (field), 1))
4972 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4973 else
4974 bitsize = -1;
4976 mode = DECL_MODE (field);
4977 if (DECL_BIT_FIELD (field))
4978 mode = VOIDmode;
4980 offset = DECL_FIELD_OFFSET (field);
4981 if (host_integerp (offset, 0)
4982 && host_integerp (bit_position (field), 0))
4984 bitpos = int_bit_position (field);
4985 offset = 0;
4987 else
4988 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4990 if (offset)
4992 rtx offset_rtx;
4994 if (CONTAINS_PLACEHOLDER_P (offset))
4995 offset = build (WITH_RECORD_EXPR, sizetype,
4996 offset, make_tree (TREE_TYPE (exp), target));
4998 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4999 if (GET_CODE (to_rtx) != MEM)
5000 abort ();
5002 #ifdef POINTERS_EXTEND_UNSIGNED
5003 if (GET_MODE (offset_rtx) != Pmode)
5004 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5005 #else
5006 if (GET_MODE (offset_rtx) != ptr_mode)
5007 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5008 #endif
5010 to_rtx = offset_address (to_rtx, offset_rtx,
5011 highest_pow2_factor (offset));
5014 if (TREE_READONLY (field))
5016 if (GET_CODE (to_rtx) == MEM)
5017 to_rtx = copy_rtx (to_rtx);
5019 RTX_UNCHANGING_P (to_rtx) = 1;
5022 #ifdef WORD_REGISTER_OPERATIONS
5023 /* If this initializes a field that is smaller than a word, at the
5024 start of a word, try to widen it to a full word.
5025 This special case allows us to output C++ member function
5026 initializations in a form that the optimizers can understand. */
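	  /* For example, on a hypothetical 32-bit big-endian target, the
	     widening below turns storing the constant 5 into a 16-bit field
	     at the start of a word of a register-held structure into a
	     full-word SImode store of (5 << 16), which the optimizers
	     handle better than a bit-field insertion.  */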
5027 if (GET_CODE (target) == REG
5028 && bitsize < BITS_PER_WORD
5029 && bitpos % BITS_PER_WORD == 0
5030 && GET_MODE_CLASS (mode) == MODE_INT
5031 && TREE_CODE (value) == INTEGER_CST
5032 && exp_size >= 0
5033 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5035 tree type = TREE_TYPE (value);
5037 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5039 type = (*lang_hooks.types.type_for_size)
5040 (BITS_PER_WORD, TREE_UNSIGNED (type));
5041 value = convert (type, value);
5044 if (BYTES_BIG_ENDIAN)
5045 value
5046 = fold (build (LSHIFT_EXPR, type, value,
5047 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5048 bitsize = BITS_PER_WORD;
5049 mode = word_mode;
5051 #endif
5053 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5054 && DECL_NONADDRESSABLE_P (field))
5056 to_rtx = copy_rtx (to_rtx);
5057 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5060 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5061 value, type, cleared,
5062 get_alias_set (TREE_TYPE (field)));
5065 else if (TREE_CODE (type) == ARRAY_TYPE
5066 || TREE_CODE (type) == VECTOR_TYPE)
5068 tree elt;
5069 int i;
5070 int need_to_clear;
5071 tree domain = TYPE_DOMAIN (type);
5072 tree elttype = TREE_TYPE (type);
5073 int const_bounds_p;
5074 HOST_WIDE_INT minelt = 0;
5075 HOST_WIDE_INT maxelt = 0;
5077 /* Vectors are like arrays, but the domain is stored via an array
5078 type indirectly. */
5079 if (TREE_CODE (type) == VECTOR_TYPE)
5081 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5082 the same field as TYPE_DOMAIN, we are not guaranteed that
5083 it always will. */
5084 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5085 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5088 const_bounds_p = (TYPE_MIN_VALUE (domain)
5089 && TYPE_MAX_VALUE (domain)
5090 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5091 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5093 /* If we have constant bounds for the range of the type, get them. */
5094 if (const_bounds_p)
5096 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5097 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5100 /* If the constructor has fewer elements than the array,
5101 clear the whole array first. Similarly if this is
5102 a static constructor of a non-BLKmode object.  */
5103 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5104 need_to_clear = 1;
5105 else
5107 HOST_WIDE_INT count = 0, zero_count = 0;
5108 need_to_clear = ! const_bounds_p;
5110 /* This loop is a more accurate version of the loop in
5111 mostly_zeros_p (it handles RANGE_EXPR in an index).
5112 It is also needed to check for missing elements. */
5113 for (elt = CONSTRUCTOR_ELTS (exp);
5114 elt != NULL_TREE && ! need_to_clear;
5115 elt = TREE_CHAIN (elt))
5117 tree index = TREE_PURPOSE (elt);
5118 HOST_WIDE_INT this_node_count;
5120 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5122 tree lo_index = TREE_OPERAND (index, 0);
5123 tree hi_index = TREE_OPERAND (index, 1);
5125 if (! host_integerp (lo_index, 1)
5126 || ! host_integerp (hi_index, 1))
5128 need_to_clear = 1;
5129 break;
5132 this_node_count = (tree_low_cst (hi_index, 1)
5133 - tree_low_cst (lo_index, 1) + 1);
5135 else
5136 this_node_count = 1;
5138 count += this_node_count;
5139 if (mostly_zeros_p (TREE_VALUE (elt)))
5140 zero_count += this_node_count;
5143 /* Clear the entire array first if there are any missing elements,
5144 or if the incidence of zero elements is >= 75%. */
5145 if (! need_to_clear
5146 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5147 need_to_clear = 1;
5150 if (need_to_clear && size > 0)
5152 if (! cleared)
5154 if (REG_P (target))
5155 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5156 else
5157 clear_storage (target, GEN_INT (size));
5159 cleared = 1;
5161 else if (REG_P (target))
5162 /* Inform later passes that the old value is dead. */
5163 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5165 /* Store each element of the constructor into
5166 the corresponding element of TARGET, determined
5167 by counting the elements. */
5168 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5169 elt;
5170 elt = TREE_CHAIN (elt), i++)
5172 enum machine_mode mode;
5173 HOST_WIDE_INT bitsize;
5174 HOST_WIDE_INT bitpos;
5175 int unsignedp;
5176 tree value = TREE_VALUE (elt);
5177 tree index = TREE_PURPOSE (elt);
5178 rtx xtarget = target;
5180 if (cleared && is_zeros_p (value))
5181 continue;
5183 unsignedp = TREE_UNSIGNED (elttype);
5184 mode = TYPE_MODE (elttype);
5185 if (mode == BLKmode)
5186 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5187 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5188 : -1);
5189 else
5190 bitsize = GET_MODE_BITSIZE (mode);
5192 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5194 tree lo_index = TREE_OPERAND (index, 0);
5195 tree hi_index = TREE_OPERAND (index, 1);
5196 rtx index_r, pos_rtx, loop_end;
5197 struct nesting *loop;
5198 HOST_WIDE_INT lo, hi, count;
5199 tree position;
5201 /* If the range is constant and "small", unroll the loop. */
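	      /* "Small" here means: the target is a register, the range
		 covers at most two elements, or the whole range occupies at
		 most 40 bytes (TYPE_SIZE is in bits, hence the 40 * 8).  */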
5202 if (const_bounds_p
5203 && host_integerp (lo_index, 0)
5204 && host_integerp (hi_index, 0)
5205 && (lo = tree_low_cst (lo_index, 0),
5206 hi = tree_low_cst (hi_index, 0),
5207 count = hi - lo + 1,
5208 (GET_CODE (target) != MEM
5209 || count <= 2
5210 || (host_integerp (TYPE_SIZE (elttype), 1)
5211 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5212 <= 40 * 8)))))
5214 lo -= minelt; hi -= minelt;
5215 for (; lo <= hi; lo++)
5217 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5219 if (GET_CODE (target) == MEM
5220 && !MEM_KEEP_ALIAS_SET_P (target)
5221 && TREE_CODE (type) == ARRAY_TYPE
5222 && TYPE_NONALIASED_COMPONENT (type))
5224 target = copy_rtx (target);
5225 MEM_KEEP_ALIAS_SET_P (target) = 1;
5228 store_constructor_field
5229 (target, bitsize, bitpos, mode, value, type, cleared,
5230 get_alias_set (elttype));
5233 else
5235 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5236 loop_end = gen_label_rtx ();
5238 unsignedp = TREE_UNSIGNED (domain);
5240 index = build_decl (VAR_DECL, NULL_TREE, domain);
5242 index_r
5243 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5244 &unsignedp, 0));
5245 SET_DECL_RTL (index, index_r);
5246 if (TREE_CODE (value) == SAVE_EXPR
5247 && SAVE_EXPR_RTL (value) == 0)
5249 /* Make sure value gets expanded once before the
5250 loop. */
5251 expand_expr (value, const0_rtx, VOIDmode, 0);
5252 emit_queue ();
5254 store_expr (lo_index, index_r, 0);
5255 loop = expand_start_loop (0);
5257 /* Assign value to element index. */
5258 position
5259 = convert (ssizetype,
5260 fold (build (MINUS_EXPR, TREE_TYPE (index),
5261 index, TYPE_MIN_VALUE (domain))));
5262 position = size_binop (MULT_EXPR, position,
5263 convert (ssizetype,
5264 TYPE_SIZE_UNIT (elttype)));
5266 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5267 xtarget = offset_address (target, pos_rtx,
5268 highest_pow2_factor (position));
5269 xtarget = adjust_address (xtarget, mode, 0);
5270 if (TREE_CODE (value) == CONSTRUCTOR)
5271 store_constructor (value, xtarget, cleared,
5272 bitsize / BITS_PER_UNIT);
5273 else
5274 store_expr (value, xtarget, 0);
5276 expand_exit_loop_if_false (loop,
5277 build (LT_EXPR, integer_type_node,
5278 index, hi_index));
5280 expand_increment (build (PREINCREMENT_EXPR,
5281 TREE_TYPE (index),
5282 index, integer_one_node), 0, 0);
5283 expand_end_loop ();
5284 emit_label (loop_end);
5287 else if ((index != 0 && ! host_integerp (index, 0))
5288 || ! host_integerp (TYPE_SIZE (elttype), 1))
5290 tree position;
5292 if (index == 0)
5293 index = ssize_int (1);
5295 if (minelt)
5296 index = convert (ssizetype,
5297 fold (build (MINUS_EXPR, index,
5298 TYPE_MIN_VALUE (domain))));
5300 position = size_binop (MULT_EXPR, index,
5301 convert (ssizetype,
5302 TYPE_SIZE_UNIT (elttype)));
5303 xtarget = offset_address (target,
5304 expand_expr (position, 0, VOIDmode, 0),
5305 highest_pow2_factor (position));
5306 xtarget = adjust_address (xtarget, mode, 0);
5307 store_expr (value, xtarget, 0);
5309 else
5311 if (index != 0)
5312 bitpos = ((tree_low_cst (index, 0) - minelt)
5313 * tree_low_cst (TYPE_SIZE (elttype), 1));
5314 else
5315 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5317 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5318 && TREE_CODE (type) == ARRAY_TYPE
5319 && TYPE_NONALIASED_COMPONENT (type))
5321 target = copy_rtx (target);
5322 MEM_KEEP_ALIAS_SET_P (target) = 1;
5325 store_constructor_field (target, bitsize, bitpos, mode, value,
5326 type, cleared, get_alias_set (elttype));
5332 /* Set constructor assignments. */
5333 else if (TREE_CODE (type) == SET_TYPE)
5335 tree elt = CONSTRUCTOR_ELTS (exp);
5336 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5337 tree domain = TYPE_DOMAIN (type);
5338 tree domain_min, domain_max, bitlength;
5340 /* The default implementation strategy is to extract the constant
5341 parts of the constructor, use that to initialize the target,
5342 and then "or" in whatever non-constant ranges we need in addition.
5344 If a large set is all zero or all ones, it is
5345 probably better to set it using memset (if available) or bzero.
5346 Also, if a large set has just a single range, it may be
5347 better to first clear the whole set (using bzero/memset)
5348 and then set the bits we want.  */
5350 /* Check for all zeros. */
5351 if (elt == NULL_TREE && size > 0)
5353 if (!cleared)
5354 clear_storage (target, GEN_INT (size));
5355 return;
5358 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5359 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5360 bitlength = size_binop (PLUS_EXPR,
5361 size_diffop (domain_max, domain_min),
5362 ssize_int (1));
5364 nbits = tree_low_cst (bitlength, 1);
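      /* E.g. a set whose domain is 0 .. 31 has domain_min 0, domain_max 31,
	 and hence bitlength and nbits both equal to 32.  */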
5366 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5367 are "complicated" (more than one range), initialize (the
5368 constant parts) by copying from a constant. */
5369 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5370 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5372 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5373 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5374 char *bit_buffer = (char *) alloca (nbits);
5375 HOST_WIDE_INT word = 0;
5376 unsigned int bit_pos = 0;
5377 unsigned int ibit = 0;
5378 unsigned int offset = 0; /* In bytes from beginning of set. */
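	  /* As an illustration of the packing below, assume an 8-bit
	     set_word_size with members 1 and 3 present: the little-endian
	     branch builds word == 0x0a, while the big-endian branch builds
	     word == 0x50, since bit 0 of the set is stored in the most
	     significant position.  */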
5380 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5381 for (;;)
5383 if (bit_buffer[ibit])
5385 if (BYTES_BIG_ENDIAN)
5386 word |= (1 << (set_word_size - 1 - bit_pos));
5387 else
5388 word |= 1 << bit_pos;
5391 bit_pos++; ibit++;
5392 if (bit_pos >= set_word_size || ibit == nbits)
5394 if (word != 0 || ! cleared)
5396 rtx datum = GEN_INT (word);
5397 rtx to_rtx;
5399 /* The assumption here is that it is safe to use
5400 XEXP if the set is multi-word, but not if
5401 it's single-word. */
5402 if (GET_CODE (target) == MEM)
5403 to_rtx = adjust_address (target, mode, offset);
5404 else if (offset == 0)
5405 to_rtx = target;
5406 else
5407 abort ();
5408 emit_move_insn (to_rtx, datum);
5411 if (ibit == nbits)
5412 break;
5413 word = 0;
5414 bit_pos = 0;
5415 offset += set_word_size / BITS_PER_UNIT;
5419 else if (!cleared)
5420 /* Don't bother clearing storage if the set is all ones. */
5421 if (TREE_CHAIN (elt) != NULL_TREE
5422 || (TREE_PURPOSE (elt) == NULL_TREE
5423 ? nbits != 1
5424 : ( ! host_integerp (TREE_VALUE (elt), 0)
5425 || ! host_integerp (TREE_PURPOSE (elt), 0)
5426 || (tree_low_cst (TREE_VALUE (elt), 0)
5427 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5428 != (HOST_WIDE_INT) nbits))))
5429 clear_storage (target, expr_size (exp));
5431 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5433 /* Start of range of element or NULL. */
5434 tree startbit = TREE_PURPOSE (elt);
5435 /* End of range of element, or element value. */
5436 tree endbit = TREE_VALUE (elt);
5437 HOST_WIDE_INT startb, endb;
5438 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5440 bitlength_rtx = expand_expr (bitlength,
5441 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5443 /* Handle non-range tuple element like [ expr ]. */
5444 if (startbit == NULL_TREE)
5446 startbit = save_expr (endbit);
5447 endbit = startbit;
5450 startbit = convert (sizetype, startbit);
5451 endbit = convert (sizetype, endbit);
5452 if (! integer_zerop (domain_min))
5454 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5455 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5457 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5458 EXPAND_CONST_ADDRESS);
5459 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5460 EXPAND_CONST_ADDRESS);
5462 if (REG_P (target))
5464 targetx
5465 = assign_temp
5466 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5467 (GET_MODE (target), 0),
5468 TYPE_QUAL_CONST)),
5469 0, 1, 1);
5470 emit_move_insn (targetx, target);
5473 else if (GET_CODE (target) == MEM)
5474 targetx = target;
5475 else
5476 abort ();
5478 /* Optimization: If startbit and endbit are constants divisible
5479 by BITS_PER_UNIT, call memset instead. */
5480 if (TARGET_MEM_FUNCTIONS
5481 && TREE_CODE (startbit) == INTEGER_CST
5482 && TREE_CODE (endbit) == INTEGER_CST
5483 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5484 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5486 emit_library_call (memset_libfunc, LCT_NORMAL,
5487 VOIDmode, 3,
5488 plus_constant (XEXP (targetx, 0),
5489 startb / BITS_PER_UNIT),
5490 Pmode,
5491 constm1_rtx, TYPE_MODE (integer_type_node),
5492 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5493 TYPE_MODE (sizetype));
5495 else
5496 emit_library_call (setbits_libfunc, LCT_NORMAL,
5497 VOIDmode, 4, XEXP (targetx, 0),
5498 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5499 startbit_rtx, TYPE_MODE (sizetype),
5500 endbit_rtx, TYPE_MODE (sizetype));
5502 if (REG_P (target))
5503 emit_move_insn (target, targetx);
5507 else
5508 abort ();
5511 /* Store the value of EXP (an expression tree)
5512 into a subfield of TARGET which has mode MODE and occupies
5513 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5514 If MODE is VOIDmode, it means that we are storing into a bit-field.
5516 If VALUE_MODE is VOIDmode, return nothing in particular.
5517 UNSIGNEDP is not used in this case.
5519 Otherwise, return an rtx for the value stored. This rtx
5520 has mode VALUE_MODE if that is convenient to do.
5521 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5523 TYPE is the type of the underlying object.
5525 ALIAS_SET is the alias set for the destination. This value will
5526 (in general) be different from that for TARGET, since TARGET is a
5527 reference to the containing structure. */
5529 static rtx
5530 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5531 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5532 int unsignedp, tree type, int alias_set)
5534 HOST_WIDE_INT width_mask = 0;
5536 if (TREE_CODE (exp) == ERROR_MARK)
5537 return const0_rtx;
5539 /* If we have nothing to store, do nothing unless the expression has
5540 side-effects. */
5541 if (bitsize == 0)
5542 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5543 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5544 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5546 /* If we are storing into an unaligned field of an aligned union that is
5547 in a register, we may have the mode of TARGET being an integer mode but
5548 MODE == BLKmode. In that case, get an aligned object whose size and
5549 alignment are the same as TARGET and store TARGET into it (we can avoid
5550 the store if the field being stored is the entire width of TARGET). Then
5551 call ourselves recursively to store the field into a BLKmode version of
5552 that object. Finally, load from the object into TARGET. This is not
5553 very efficient in general, but should only be slightly more expensive
5554 than the otherwise-required unaligned accesses. Perhaps this can be
5555 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5556 twice, once with emit_move_insn and once via store_field. */
5558 if (mode == BLKmode
5559 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5561 rtx object = assign_temp (type, 0, 1, 1);
5562 rtx blk_object = adjust_address (object, BLKmode, 0);
5564 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5565 emit_move_insn (object, target);
5567 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5568 alias_set);
5570 emit_move_insn (target, object);
5572 /* We want to return the BLKmode version of the data. */
5573 return blk_object;
5576 if (GET_CODE (target) == CONCAT)
5578 /* We're storing into a struct containing a single __complex. */
5580 if (bitpos != 0)
5581 abort ();
5582 return store_expr (exp, target, 0);
5585 /* If the structure is in a register or if the component
5586 is a bit field, we cannot use addressing to access it.
5587 Use bit-field techniques or SUBREG to store in it. */
5589 if (mode == VOIDmode
5590 || (mode != BLKmode && ! direct_store[(int) mode]
5591 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5592 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5593 || GET_CODE (target) == REG
5594 || GET_CODE (target) == SUBREG
5595 /* If the field isn't aligned enough to store as an ordinary memref,
5596 store it as a bit field. */
5597 || (mode != BLKmode
5598 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5599 || bitpos % GET_MODE_ALIGNMENT (mode))
5600 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5601 || (bitpos % BITS_PER_UNIT != 0)))
5602 /* If the RHS and field are a constant size and the size of the
5603 RHS isn't the same size as the bitfield, we must use bitfield
5604 operations. */
5605 || (bitsize >= 0
5606 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5607 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5609 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5611 /* If BITSIZE is narrower than the size of the type of EXP
5612 we will be narrowing TEMP. Normally, what's wanted are the
5613 low-order bits. However, if EXP's type is a record and this is
5614 a big-endian machine, we want the upper BITSIZE bits.
5615 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5616 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5617 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5618 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5619 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5620 - bitsize),
5621 NULL_RTX, 1);
5623 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5624 MODE. */
5625 if (mode != VOIDmode && mode != BLKmode
5626 && mode != TYPE_MODE (TREE_TYPE (exp)))
5627 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5629 /* If the modes of TARGET and TEMP are both BLKmode, both
5630 must be in memory and BITPOS must be aligned on a byte
5631 boundary. If so, we simply do a block copy. */
5632 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5634 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5635 || bitpos % BITS_PER_UNIT != 0)
5636 abort ();
5638 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5639 emit_block_move (target, temp,
5640 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5641 / BITS_PER_UNIT),
5642 BLOCK_OP_NORMAL);
5644 return value_mode == VOIDmode ? const0_rtx : target;
5647 /* Store the value in the bitfield. */
5648 store_bit_field (target, bitsize, bitpos, mode, temp,
5649 int_size_in_bytes (type));
5651 if (value_mode != VOIDmode)
5653 /* The caller wants an rtx for the value.
5654 If possible, avoid refetching from the bitfield itself. */
5655 if (width_mask != 0
5656 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5658 tree count;
5659 enum machine_mode tmode;
5661 tmode = GET_MODE (temp);
5662 if (tmode == VOIDmode)
5663 tmode = value_mode;
5665 if (unsignedp)
5666 return expand_and (tmode, temp,
5667 gen_int_mode (width_mask, tmode),
5668 NULL_RTX);
5670 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5671 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5672 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5675 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5676 NULL_RTX, value_mode, VOIDmode,
5677 int_size_in_bytes (type));
5679 return const0_rtx;
5681 else
5683 rtx addr = XEXP (target, 0);
5684 rtx to_rtx = target;
5686 /* If a value is wanted, it must be the lhs;
5687 so make the address stable for multiple use. */
5689 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5690 && ! CONSTANT_ADDRESS_P (addr)
5691 /* A frame-pointer reference is already stable. */
5692 && ! (GET_CODE (addr) == PLUS
5693 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5694 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5695 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5696 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5698 /* Now build a reference to just the desired component. */
5700 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5702 if (to_rtx == target)
5703 to_rtx = copy_rtx (to_rtx);
5705 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5706 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5707 set_mem_alias_set (to_rtx, alias_set);
5709 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5713 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5714 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5715 codes and find the ultimate containing object, which we return.
5717 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5718 bit position, and *PUNSIGNEDP to the signedness of the field.
5719 If the position of the field is variable, we store a tree
5720 giving the variable offset (in units) in *POFFSET.
5721 This offset is in addition to the bit position.
5722 If the position is not variable, we store 0 in *POFFSET.
5724 If any of the extraction expressions is volatile,
5725 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5727 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5728 is a mode that can be used to access the field. In that case, *PBITSIZE
5729 is redundant.
5731 If the field describes a variable-sized object, *PMODE is set to
5732 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5733 this case, but the address of the object can be found. */
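/* Illustrative sketch, not part of the original source: for a reference
   such as `s.b' where `s' has type `struct { int a; unsigned b : 3; }',
   a call along the lines of

       tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                         &mode, &unsignedp, &volatilep);

   would be expected, on a target with 32-bit `int' and no surprising
   padding, to return the VAR_DECL for `s' with *PBITSIZE == 3,
   *PBITPOS == 32, *POFFSET == 0, *PMODE == VOIDmode (a bit-field) and
   *PUNSIGNEDP == 1.  The exact layout is target-dependent; this only
   shows how the out-parameters relate to one another.  */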
5735 tree
5736 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5737 HOST_WIDE_INT *pbitpos, tree *poffset,
5738 enum machine_mode *pmode, int *punsignedp,
5739 int *pvolatilep)
5741 tree size_tree = 0;
5742 enum machine_mode mode = VOIDmode;
5743 tree offset = size_zero_node;
5744 tree bit_offset = bitsize_zero_node;
5745 tree placeholder_ptr = 0;
5746 tree tem;
5748 /* First get the mode, signedness, and size. We do this from just the
5749 outermost expression. */
5750 if (TREE_CODE (exp) == COMPONENT_REF)
5752 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5753 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5754 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5756 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5758 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5760 size_tree = TREE_OPERAND (exp, 1);
5761 *punsignedp = TREE_UNSIGNED (exp);
5763 else
5765 mode = TYPE_MODE (TREE_TYPE (exp));
5766 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5768 if (mode == BLKmode)
5769 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5770 else
5771 *pbitsize = GET_MODE_BITSIZE (mode);
5774 if (size_tree != 0)
5776 if (! host_integerp (size_tree, 1))
5777 mode = BLKmode, *pbitsize = -1;
5778 else
5779 *pbitsize = tree_low_cst (size_tree, 1);
5782 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5783 and find the ultimate containing object. */
5784 while (1)
5786 if (TREE_CODE (exp) == BIT_FIELD_REF)
5787 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5788 else if (TREE_CODE (exp) == COMPONENT_REF)
5790 tree field = TREE_OPERAND (exp, 1);
5791 tree this_offset = DECL_FIELD_OFFSET (field);
5793 /* If this field hasn't been filled in yet, don't go
5794 past it. This should only happen when folding expressions
5795 made during type construction. */
5796 if (this_offset == 0)
5797 break;
5798 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5799 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5801 offset = size_binop (PLUS_EXPR, offset, this_offset);
5802 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5803 DECL_FIELD_BIT_OFFSET (field));
5805 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5808 else if (TREE_CODE (exp) == ARRAY_REF
5809 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5811 tree index = TREE_OPERAND (exp, 1);
5812 tree array = TREE_OPERAND (exp, 0);
5813 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5814 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5815 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5817 /* We assume all arrays have sizes that are a multiple of a byte.
5818 First subtract the lower bound, if any, in the type of the
5819 index, then convert to sizetype and multiply by the size of the
5820 array element. */
5821 if (low_bound != 0 && ! integer_zerop (low_bound))
5822 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5823 index, low_bound));
5825 /* If the index has a self-referential type, pass it to a
5826 WITH_RECORD_EXPR; if the component size does, pass our
5827 component to one. */
5828 if (CONTAINS_PLACEHOLDER_P (index))
5829 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5830 if (CONTAINS_PLACEHOLDER_P (unit_size))
5831 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5833 offset = size_binop (PLUS_EXPR, offset,
5834 size_binop (MULT_EXPR,
5835 convert (sizetype, index),
5836 unit_size));
5839 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5841 tree new = find_placeholder (exp, &placeholder_ptr);
5843 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5844 We might have been called from tree optimization where we
5845 haven't set up an object yet. */
5846 if (new == 0)
5847 break;
5848 else
5849 exp = new;
5851 continue;
5854 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5855 conversions that don't change the mode, and all view conversions
5856 except those that need to "step up" the alignment. */
5857 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5858 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5859 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5860 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5861 && STRICT_ALIGNMENT
5862 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5863 < BIGGEST_ALIGNMENT)
5864 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5865 || TYPE_ALIGN_OK (TREE_TYPE
5866 (TREE_OPERAND (exp, 0))))))
5867 && ! ((TREE_CODE (exp) == NOP_EXPR
5868 || TREE_CODE (exp) == CONVERT_EXPR)
5869 && (TYPE_MODE (TREE_TYPE (exp))
5870 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5871 break;
5873 /* If any reference in the chain is volatile, the effect is volatile. */
5874 if (TREE_THIS_VOLATILE (exp))
5875 *pvolatilep = 1;
5877 exp = TREE_OPERAND (exp, 0);
5880 /* If OFFSET is constant, see if we can return the whole thing as a
5881 constant bit position. Otherwise, split it up. */
5882 if (host_integerp (offset, 0)
5883 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5884 bitsize_unit_node))
5885 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5886 && host_integerp (tem, 0))
5887 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5888 else
5889 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5891 *pmode = mode;
5892 return exp;
5895 /* Return 1 if T is an expression that get_inner_reference handles. */
5897 int
5898 handled_component_p (tree t)
5900 switch (TREE_CODE (t))
5902 case BIT_FIELD_REF:
5903 case COMPONENT_REF:
5904 case ARRAY_REF:
5905 case ARRAY_RANGE_REF:
5906 case NON_LVALUE_EXPR:
5907 case VIEW_CONVERT_EXPR:
5908 return 1;
5910 /* ??? Sure they are handled, but get_inner_reference may return
5911 a different PBITSIZE, depending upon whether the expression is
5912 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5913 case NOP_EXPR:
5914 case CONVERT_EXPR:
5915 return (TYPE_MODE (TREE_TYPE (t))
5916 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5918 default:
5919 return 0;
5923 /* Given an rtx VALUE that may contain additions and multiplications, return
5924 an equivalent value that just refers to a register, memory, or constant.
5925 This is done by generating instructions to perform the arithmetic and
5926 returning a pseudo-register containing the value.
5928 The returned value may be a REG, SUBREG, MEM or constant. */
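/* Illustrative sketch, not part of the original source (BASE_REG and DISP
   are placeholders): a caller that needs a "flat" operand can write

       rtx addr = force_operand (gen_rtx_PLUS (Pmode, base_reg,
                                               GEN_INT (disp)),
                                 NULL_RTX);

   For such a PLUS the binary-operator path below ends up in
   expand_simple_binop, so typically an add is emitted and ADDR is a
   pseudo register holding the sum, directly usable as an address or
   instruction operand.  */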
5930 rtx
5931 force_operand (rtx value, rtx target)
5933 rtx op1, op2;
5934 /* Use subtarget as the target for operand 0 of a binary operation. */
5935 rtx subtarget = get_subtarget (target);
5936 enum rtx_code code = GET_CODE (value);
5938 /* Check for a PIC address load. */
5939 if ((code == PLUS || code == MINUS)
5940 && XEXP (value, 0) == pic_offset_table_rtx
5941 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5942 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5943 || GET_CODE (XEXP (value, 1)) == CONST))
5945 if (!subtarget)
5946 subtarget = gen_reg_rtx (GET_MODE (value));
5947 emit_move_insn (subtarget, value);
5948 return subtarget;
5951 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5953 if (!target)
5954 target = gen_reg_rtx (GET_MODE (value));
5955 convert_move (target, force_operand (XEXP (value, 0), NULL),
5956 code == ZERO_EXTEND);
5957 return target;
5960 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5962 op2 = XEXP (value, 1);
5963 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5964 subtarget = 0;
5965 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5967 code = PLUS;
5968 op2 = negate_rtx (GET_MODE (value), op2);
5971 /* Check for an addition with OP2 a constant integer and our first
5972 operand a PLUS of a virtual register and something else. In that
5973 case, we want to emit the sum of the virtual register and the
5974 constant first and then add the other value. This allows virtual
5975 register instantiation to simply modify the constant rather than
5976 creating another one around this addition. */
5977 if (code == PLUS && GET_CODE (op2) == CONST_INT
5978 && GET_CODE (XEXP (value, 0)) == PLUS
5979 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5980 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5981 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5983 rtx temp = expand_simple_binop (GET_MODE (value), code,
5984 XEXP (XEXP (value, 0), 0), op2,
5985 subtarget, 0, OPTAB_LIB_WIDEN);
5986 return expand_simple_binop (GET_MODE (value), code, temp,
5987 force_operand (XEXP (XEXP (value,
5988 0), 1), 0),
5989 target, 0, OPTAB_LIB_WIDEN);
5992 op1 = force_operand (XEXP (value, 0), subtarget);
5993 op2 = force_operand (op2, NULL_RTX);
5994 switch (code)
5996 case MULT:
5997 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5998 case DIV:
5999 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6000 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6001 target, 1, OPTAB_LIB_WIDEN);
6002 else
6003 return expand_divmod (0,
6004 FLOAT_MODE_P (GET_MODE (value))
6005 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6006 GET_MODE (value), op1, op2, target, 0);
6007 break;
6008 case MOD:
6009 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6010 target, 0);
6011 break;
6012 case UDIV:
6013 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6014 target, 1);
6015 break;
6016 case UMOD:
6017 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6018 target, 1);
6019 break;
6020 case ASHIFTRT:
6021 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6022 target, 0, OPTAB_LIB_WIDEN);
6023 break;
6024 default:
6025 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6026 target, 1, OPTAB_LIB_WIDEN);
6029 if (GET_RTX_CLASS (code) == '1')
6031 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6032 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6035 #ifdef INSN_SCHEDULING
6036 /* On machines that have insn scheduling, we want all memory references to be
6037 explicit, so we need to deal with such paradoxical SUBREGs. */
6038 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6039 && (GET_MODE_SIZE (GET_MODE (value))
6040 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6041 value
6042 = simplify_gen_subreg (GET_MODE (value),
6043 force_reg (GET_MODE (SUBREG_REG (value)),
6044 force_operand (SUBREG_REG (value),
6045 NULL_RTX)),
6046 GET_MODE (SUBREG_REG (value)),
6047 SUBREG_BYTE (value));
6048 #endif
6050 return value;
6053 /* Subroutine of expand_expr: return nonzero iff there is no way that
6054 EXP can reference X, which is being modified. TOP_P is nonzero if this
6055 call is going to be used to determine whether we need a temporary
6056 for EXP, as opposed to a recursive call to this function.
6058 It is always safe for this routine to return zero since it merely
6059 searches for optimization opportunities. */
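/* Illustrative sketch, not part of the original source (TO_RTX and RHS are
   placeholders): before reusing a destination rtx while expanding
   something like `*p = f (x)', a caller can ask

       if (! safe_from_p (to_rtx, rhs, 1))
         ... evaluate RHS into a fresh temporary first ...

   Here RHS is a CALL_EXPR, so if TO_RTX is a MEM (outside the outgoing
   argument area) the CALL_EXPR case below conservatively answers 0
   (a call is assumed to clobber all of memory), and the caller then goes
   through a temporary instead of storing directly.  */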
6061 int
6062 safe_from_p (rtx x, tree exp, int top_p)
6064 rtx exp_rtl = 0;
6065 int i, nops;
6066 static tree save_expr_list;
6068 if (x == 0
6069 /* If EXP has varying size, we MUST use a target since we currently
6070 have no way of allocating temporaries of variable size
6071 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6072 So we assume here that something at a higher level has prevented a
6073 clash. This is somewhat bogus, but the best we can do. Only
6074 do this when X is BLKmode and when we are at the top level. */
6075 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6076 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6077 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6078 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6079 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6080 != INTEGER_CST)
6081 && GET_MODE (x) == BLKmode)
6082 /* If X is in the outgoing argument area, it is always safe. */
6083 || (GET_CODE (x) == MEM
6084 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6085 || (GET_CODE (XEXP (x, 0)) == PLUS
6086 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6087 return 1;
6089 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6090 find the underlying pseudo. */
6091 if (GET_CODE (x) == SUBREG)
6093 x = SUBREG_REG (x);
6094 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6095 return 0;
6098 /* A SAVE_EXPR might appear many times in the expression passed to the
6099 top-level safe_from_p call, and if it has a complex subexpression,
6100 examining it multiple times could result in a combinatorial explosion.
6101 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6102 with optimization took about 28 minutes to compile -- even though it was
6103 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6104 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6105 we have processed. Note that the only test of top_p was above. */
6107 if (top_p)
6109 int rtn;
6110 tree t;
6112 save_expr_list = 0;
6114 rtn = safe_from_p (x, exp, 0);
6116 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6117 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6119 return rtn;
6122 /* Now look at our tree code and possibly recurse. */
6123 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6125 case 'd':
6126 exp_rtl = DECL_RTL_IF_SET (exp);
6127 break;
6129 case 'c':
6130 return 1;
6132 case 'x':
6133 if (TREE_CODE (exp) == TREE_LIST)
6135 while (1)
6137 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6138 return 0;
6139 exp = TREE_CHAIN (exp);
6140 if (!exp)
6141 return 1;
6142 if (TREE_CODE (exp) != TREE_LIST)
6143 return safe_from_p (x, exp, 0);
6146 else if (TREE_CODE (exp) == ERROR_MARK)
6147 return 1; /* An already-visited SAVE_EXPR? */
6148 else
6149 return 0;
6151 case '2':
6152 case '<':
6153 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6154 return 0;
6155 /* FALLTHRU */
6157 case '1':
6158 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6160 case 'e':
6161 case 'r':
6162 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6163 the expression. If it is set, we conflict iff we are that rtx or
6164 both are in memory. Otherwise, we check all operands of the
6165 expression recursively. */
6167 switch (TREE_CODE (exp))
6169 case ADDR_EXPR:
6170 /* If the operand is static or we are static, we can't conflict.
6171 Likewise if we don't conflict with the operand at all. */
6172 if (staticp (TREE_OPERAND (exp, 0))
6173 || TREE_STATIC (exp)
6174 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6175 return 1;
6177 /* Otherwise, the only way this can conflict is if we are taking
6178 the address of a DECL whose address is part of X, which is
6179 very rare. */
6180 exp = TREE_OPERAND (exp, 0);
6181 if (DECL_P (exp))
6183 if (!DECL_RTL_SET_P (exp)
6184 || GET_CODE (DECL_RTL (exp)) != MEM)
6185 return 0;
6186 else
6187 exp_rtl = XEXP (DECL_RTL (exp), 0);
6189 break;
6191 case INDIRECT_REF:
6192 if (GET_CODE (x) == MEM
6193 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6194 get_alias_set (exp)))
6195 return 0;
6196 break;
6198 case CALL_EXPR:
6199 /* Assume that the call will clobber all hard registers and
6200 all of memory. */
6201 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6202 || GET_CODE (x) == MEM)
6203 return 0;
6204 break;
6206 case RTL_EXPR:
6207 /* If a sequence exists, we would have to scan every instruction
6208 in the sequence to see if it was safe. This is probably not
6209 worthwhile. */
6210 if (RTL_EXPR_SEQUENCE (exp))
6211 return 0;
6213 exp_rtl = RTL_EXPR_RTL (exp);
6214 break;
6216 case WITH_CLEANUP_EXPR:
6217 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6218 break;
6220 case CLEANUP_POINT_EXPR:
6221 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6223 case SAVE_EXPR:
6224 exp_rtl = SAVE_EXPR_RTL (exp);
6225 if (exp_rtl)
6226 break;
6228 /* If we've already scanned this, don't do it again. Otherwise,
6229 show we've scanned it and record for clearing the flag if we're
6230 going on. */
6231 if (TREE_PRIVATE (exp))
6232 return 1;
6234 TREE_PRIVATE (exp) = 1;
6235 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6237 TREE_PRIVATE (exp) = 0;
6238 return 0;
6241 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6242 return 1;
6244 case BIND_EXPR:
6245 /* The only operand we look at is operand 1. The rest aren't
6246 part of the expression. */
6247 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6249 case METHOD_CALL_EXPR:
6250 /* This takes an rtx argument, but shouldn't appear here. */
6251 abort ();
6253 default:
6254 break;
6257 /* If we have an rtx, we do not need to scan our operands. */
6258 if (exp_rtl)
6259 break;
6261 nops = first_rtl_op (TREE_CODE (exp));
6262 for (i = 0; i < nops; i++)
6263 if (TREE_OPERAND (exp, i) != 0
6264 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6265 return 0;
6267 /* If this is a language-specific tree code, it may require
6268 special handling. */
6269 if ((unsigned int) TREE_CODE (exp)
6270 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6271 && !(*lang_hooks.safe_from_p) (x, exp))
6272 return 0;
6275 /* If we have an rtl, find any enclosed object. Then see if we conflict
6276 with it. */
6277 if (exp_rtl)
6279 if (GET_CODE (exp_rtl) == SUBREG)
6281 exp_rtl = SUBREG_REG (exp_rtl);
6282 if (GET_CODE (exp_rtl) == REG
6283 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6284 return 0;
6287 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6288 are memory and they conflict. */
6289 return ! (rtx_equal_p (x, exp_rtl)
6290 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6291 && true_dependence (exp_rtl, VOIDmode, x,
6292 rtx_addr_varies_p)));
6295 /* If we reach here, it is safe. */
6296 return 1;
6299 /* Subroutine of expand_expr: return rtx if EXP is a
6300 variable or parameter; else return 0. */
6302 static rtx
6303 var_rtx (tree exp)
6305 STRIP_NOPS (exp);
6306 switch (TREE_CODE (exp))
6308 case PARM_DECL:
6309 case VAR_DECL:
6310 return DECL_RTL (exp);
6311 default:
6312 return 0;
6316 #ifdef MAX_INTEGER_COMPUTATION_MODE
6318 void
6319 check_max_integer_computation_mode (tree exp)
6321 enum tree_code code;
6322 enum machine_mode mode;
6324 /* Strip any NOPs that don't change the mode. */
6325 STRIP_NOPS (exp);
6326 code = TREE_CODE (exp);
6328 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6329 if (code == NOP_EXPR
6330 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6331 return;
6333 /* First check the type of the overall operation. We need only look at
6334 unary, binary and relational operations. */
6335 if (TREE_CODE_CLASS (code) == '1'
6336 || TREE_CODE_CLASS (code) == '2'
6337 || TREE_CODE_CLASS (code) == '<')
6339 mode = TYPE_MODE (TREE_TYPE (exp));
6340 if (GET_MODE_CLASS (mode) == MODE_INT
6341 && mode > MAX_INTEGER_COMPUTATION_MODE)
6342 internal_error ("unsupported wide integer operation");
6345 /* Check operand of a unary op. */
6346 if (TREE_CODE_CLASS (code) == '1')
6348 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6349 if (GET_MODE_CLASS (mode) == MODE_INT
6350 && mode > MAX_INTEGER_COMPUTATION_MODE)
6351 internal_error ("unsupported wide integer operation");
6354 /* Check operands of a binary/comparison op. */
6355 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6357 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6358 if (GET_MODE_CLASS (mode) == MODE_INT
6359 && mode > MAX_INTEGER_COMPUTATION_MODE)
6360 internal_error ("unsupported wide integer operation");
6362 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6363 if (GET_MODE_CLASS (mode) == MODE_INT
6364 && mode > MAX_INTEGER_COMPUTATION_MODE)
6365 internal_error ("unsupported wide integer operation");
6368 #endif
6370 /* Return the highest power of two that EXP is known to be a multiple of.
6371 This is used in updating alignment of MEMs in array references. */
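/* Illustrative sketch, not part of the original source: for a byte-offset
   expression equivalent to `i * 12 + 8' this returns 4.  The MULT_EXPR
   case multiplies 1 (nothing is known about `i') by 4 (the largest power
   of two dividing 12), and the PLUS_EXPR case then takes MIN (4, 8).
   That bound is the factor the callers further down in this file pass to
   offset_address when adjusting a MEM.  */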
6373 static unsigned HOST_WIDE_INT
6374 highest_pow2_factor (tree exp)
6376 unsigned HOST_WIDE_INT c0, c1;
6378 switch (TREE_CODE (exp))
6380 case INTEGER_CST:
6381 /* We can find the lowest bit that's a one. If the low
6382 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6383 We need to handle this case since we can find it in a COND_EXPR,
6384 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6385 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6386 later ICE. */
6387 if (TREE_CONSTANT_OVERFLOW (exp))
6388 return BIGGEST_ALIGNMENT;
6389 else
6391 /* Note: tree_low_cst is intentionally not used here,
6392 we don't care about the upper bits. */
6393 c0 = TREE_INT_CST_LOW (exp);
6394 c0 &= -c0;
6395 return c0 ? c0 : BIGGEST_ALIGNMENT;
6397 break;
6399 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6400 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6401 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6402 return MIN (c0, c1);
6404 case MULT_EXPR:
6405 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6406 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6407 return c0 * c1;
6409 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6410 case CEIL_DIV_EXPR:
6411 if (integer_pow2p (TREE_OPERAND (exp, 1))
6412 && host_integerp (TREE_OPERAND (exp, 1), 1))
6414 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6415 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6416 return MAX (1, c0 / c1);
6418 break;
6420 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6421 case SAVE_EXPR: case WITH_RECORD_EXPR:
6422 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6424 case COMPOUND_EXPR:
6425 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6427 case COND_EXPR:
6428 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6429 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6430 return MIN (c0, c1);
6432 default:
6433 break;
6436 return 1;
6439 /* Similar, except that it is known that the expression must be a multiple
6440 of the alignment of TYPE. */
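/* Illustrative sketch, not part of the original source: if TYPE is a
   16-byte-aligned vector type and nothing useful is known about EXP
   (highest_pow2_factor returns 1), the result is MAX (1, 16) == 16,
   i.e. the type's own alignment is the floor for the answer.  */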
6442 static unsigned HOST_WIDE_INT
6443 highest_pow2_factor_for_type (tree type, tree exp)
6445 unsigned HOST_WIDE_INT type_align, factor;
6447 factor = highest_pow2_factor (exp);
6448 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6449 return MAX (factor, type_align);
6452 /* Return an object on the placeholder list that matches EXP, a
6453 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6454 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6455 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6456 is a location which initially points to a starting location in the
6457 placeholder list (zero means start of the list) and where a pointer into
6458 the placeholder list at which the object is found is placed. */
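/* Illustrative sketch, not part of the original source: if the
   placeholder list currently carries an object OBJ of type `struct S',
   then a PLACEHOLDER_EXPR of type `struct S' finds OBJ itself in the
   first loop below, while if the only match has type `struct S *' the
   second loop returns an INDIRECT_REF of it, so a successful lookup
   always yields something of the requested type.  */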
6460 tree
6461 find_placeholder (tree exp, tree *plist)
6463 tree type = TREE_TYPE (exp);
6464 tree placeholder_expr;
6466 for (placeholder_expr
6467 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6468 placeholder_expr != 0;
6469 placeholder_expr = TREE_CHAIN (placeholder_expr))
6471 tree need_type = TYPE_MAIN_VARIANT (type);
6472 tree elt;
6474 /* Find the outermost reference that is of the type we want. If none,
6475 see if any object has a type that is a pointer to the type we
6476 want. */
6477 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6478 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6479 || TREE_CODE (elt) == COND_EXPR)
6480 ? TREE_OPERAND (elt, 1)
6481 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6482 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6483 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6484 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6485 ? TREE_OPERAND (elt, 0) : 0))
6486 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6488 if (plist)
6489 *plist = placeholder_expr;
6490 return elt;
6493 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6494 elt
6495 = ((TREE_CODE (elt) == COMPOUND_EXPR
6496 || TREE_CODE (elt) == COND_EXPR)
6497 ? TREE_OPERAND (elt, 1)
6498 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6499 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6500 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6501 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6502 ? TREE_OPERAND (elt, 0) : 0))
6503 if (POINTER_TYPE_P (TREE_TYPE (elt))
6504 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6505 == need_type))
6507 if (plist)
6508 *plist = placeholder_expr;
6509 return build1 (INDIRECT_REF, need_type, elt);
6513 return 0;
6516 /* expand_expr: generate code for computing expression EXP.
6517 An rtx for the computed value is returned. The value is never null.
6518 In the case of a void EXP, const0_rtx is returned.
6520 The value may be stored in TARGET if TARGET is nonzero.
6521 TARGET is just a suggestion; callers must assume that
6522 the rtx returned may not be the same as TARGET.
6524 If TARGET is CONST0_RTX, it means that the value will be ignored.
6526 If TMODE is not VOIDmode, it suggests generating the
6527 result in mode TMODE. But this is done only when convenient.
6528 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6529 TMODE is just a suggestion; callers must assume that
6530 the rtx returned may not have mode TMODE.
6532 Note that TARGET may have neither TMODE nor MODE. In that case, it
6533 probably will not be used.
6535 If MODIFIER is EXPAND_SUM then when EXP is an addition
6536 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6537 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6538 products as above, or REG or MEM, or constant.
6539 Ordinarily in such cases we would output mul or add instructions
6540 and then return a pseudo reg containing the sum.
6542 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6543 it also marks a label as absolutely required (it can't be dead).
6544 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6545 This is used for outputting expressions used in initializers.
6547 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6548 with a constant address even if that address is not normally legitimate.
6549 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6551 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6552 a call parameter. Such targets require special care as we haven't yet
6553 marked TARGET so that it's safe from being trashed by libcalls. We
6554 don't want to use TARGET for anything but the final result;
6555 intermediate values must go elsewhere. Additionally, calls to
6556 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
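/* Illustrative sketch, not part of the original source: the most common
   calling pattern in this file is simply

       op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
                          EXPAND_NORMAL);

   i.e. no preferred target, no preferred mode, default modifier.  Callers
   that do pass TARGET or TMODE still have to cope with getting back a
   different rtx or mode, as explained above.  */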
6558 rtx
6559 expand_expr (tree exp, rtx target, enum machine_mode tmode, enum expand_modifier modifier)
6561 rtx op0, op1, temp;
6562 tree type = TREE_TYPE (exp);
6563 int unsignedp = TREE_UNSIGNED (type);
6564 enum machine_mode mode;
6565 enum tree_code code = TREE_CODE (exp);
6566 optab this_optab;
6567 rtx subtarget, original_target;
6568 int ignore;
6569 tree context;
6571 /* Handle ERROR_MARK before anybody tries to access its type. */
6572 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6574 op0 = CONST0_RTX (tmode);
6575 if (op0 != 0)
6576 return op0;
6577 return const0_rtx;
6580 mode = TYPE_MODE (type);
6581 /* Use subtarget as the target for operand 0 of a binary operation. */
6582 subtarget = get_subtarget (target);
6583 original_target = target;
6584 ignore = (target == const0_rtx
6585 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6586 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6587 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6588 && TREE_CODE (type) == VOID_TYPE));
6590 /* If we are going to ignore this result, we need only do something
6591 if there is a side-effect somewhere in the expression. If there
6592 is, short-circuit the most common cases here. Note that we must
6593 not call expand_expr with anything but const0_rtx in case this
6594 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6596 if (ignore)
6598 if (! TREE_SIDE_EFFECTS (exp))
6599 return const0_rtx;
6601 /* Ensure we reference a volatile object even if value is ignored, but
6602 don't do this if all we are doing is taking its address. */
6603 if (TREE_THIS_VOLATILE (exp)
6604 && TREE_CODE (exp) != FUNCTION_DECL
6605 && mode != VOIDmode && mode != BLKmode
6606 && modifier != EXPAND_CONST_ADDRESS)
6608 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6609 if (GET_CODE (temp) == MEM)
6610 temp = copy_to_reg (temp);
6611 return const0_rtx;
6614 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6615 || code == INDIRECT_REF || code == BUFFER_REF)
6616 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6617 modifier);
6619 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6620 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6622 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6623 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6624 return const0_rtx;
6626 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6627 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6628 /* If the second operand has no side effects, just evaluate
6629 the first. */
6630 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6631 modifier);
6632 else if (code == BIT_FIELD_REF)
6634 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6635 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6636 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6637 return const0_rtx;
6640 target = 0;
6643 #ifdef MAX_INTEGER_COMPUTATION_MODE
6644 /* Only check stuff here if the mode we want is different from the mode
6645 of the expression; if it's the same, check_max_integer_computation_mode
6646 will handle it. Do we really need to check this stuff at all? */
6648 if (target
6649 && GET_MODE (target) != mode
6650 && TREE_CODE (exp) != INTEGER_CST
6651 && TREE_CODE (exp) != PARM_DECL
6652 && TREE_CODE (exp) != ARRAY_REF
6653 && TREE_CODE (exp) != ARRAY_RANGE_REF
6654 && TREE_CODE (exp) != COMPONENT_REF
6655 && TREE_CODE (exp) != BIT_FIELD_REF
6656 && TREE_CODE (exp) != INDIRECT_REF
6657 && TREE_CODE (exp) != CALL_EXPR
6658 && TREE_CODE (exp) != VAR_DECL
6659 && TREE_CODE (exp) != RTL_EXPR)
6661 enum machine_mode mode = GET_MODE (target);
6663 if (GET_MODE_CLASS (mode) == MODE_INT
6664 && mode > MAX_INTEGER_COMPUTATION_MODE)
6665 internal_error ("unsupported wide integer operation");
6668 if (tmode != mode
6669 && TREE_CODE (exp) != INTEGER_CST
6670 && TREE_CODE (exp) != PARM_DECL
6671 && TREE_CODE (exp) != ARRAY_REF
6672 && TREE_CODE (exp) != ARRAY_RANGE_REF
6673 && TREE_CODE (exp) != COMPONENT_REF
6674 && TREE_CODE (exp) != BIT_FIELD_REF
6675 && TREE_CODE (exp) != INDIRECT_REF
6676 && TREE_CODE (exp) != VAR_DECL
6677 && TREE_CODE (exp) != CALL_EXPR
6678 && TREE_CODE (exp) != RTL_EXPR
6679 && GET_MODE_CLASS (tmode) == MODE_INT
6680 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6681 internal_error ("unsupported wide integer operation");
6683 check_max_integer_computation_mode (exp);
6684 #endif
6686 /* If we will do cse, generate all results into pseudo registers
6687 since 1) that allows cse to find more things
6688 and 2) otherwise cse could produce an insn the machine
6689 cannot support. An exception is a CONSTRUCTOR into a multi-word
6690 MEM: that's much more likely to be most efficient into the MEM.
6691 Another is a CALL_EXPR which must return in memory. */
6693 if (! cse_not_expected && mode != BLKmode && target
6694 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6695 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6696 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6697 target = 0;
6699 switch (code)
6701 case LABEL_DECL:
6703 tree function = decl_function_context (exp);
6704 /* Labels in containing functions, or labels used from initializers,
6705 must be forced. */
6706 if (modifier == EXPAND_INITIALIZER
6707 || (function != current_function_decl
6708 && function != inline_function_decl
6709 && function != 0))
6710 temp = force_label_rtx (exp);
6711 else
6712 temp = label_rtx (exp);
6714 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6715 if (function != current_function_decl
6716 && function != inline_function_decl && function != 0)
6717 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6718 return temp;
6721 case PARM_DECL:
6722 if (!DECL_RTL_SET_P (exp))
6724 error_with_decl (exp, "prior parameter's size depends on `%s'");
6725 return CONST0_RTX (mode);
6728 /* ... fall through ... */
6730 case VAR_DECL:
6731 /* If a static var's type was incomplete when the decl was written,
6732 but the type is complete now, lay out the decl now. */
6733 if (DECL_SIZE (exp) == 0
6734 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6735 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6736 layout_decl (exp, 0);
6738 /* ... fall through ... */
6740 case FUNCTION_DECL:
6741 case RESULT_DECL:
6742 if (DECL_RTL (exp) == 0)
6743 abort ();
6745 /* Ensure variable marked as used even if it doesn't go through
6746 a parser. If it hasn't been used yet, write out an external
6747 definition. */
6748 if (! TREE_USED (exp))
6750 assemble_external (exp);
6751 TREE_USED (exp) = 1;
6754 /* Show we haven't gotten RTL for this yet. */
6755 temp = 0;
6757 /* Handle variables inherited from containing functions. */
6758 context = decl_function_context (exp);
6760 /* We treat inline_function_decl as an alias for the current function
6761 because that is the inline function whose vars, types, etc.
6762 are being merged into the current function.
6763 See expand_inline_function. */
6765 if (context != 0 && context != current_function_decl
6766 && context != inline_function_decl
6767 /* If var is static, we don't need a static chain to access it. */
6768 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6769 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6771 rtx addr;
6773 /* Mark as non-local and addressable. */
6774 DECL_NONLOCAL (exp) = 1;
6775 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6776 abort ();
6777 (*lang_hooks.mark_addressable) (exp);
6778 if (GET_CODE (DECL_RTL (exp)) != MEM)
6779 abort ();
6780 addr = XEXP (DECL_RTL (exp), 0);
6781 if (GET_CODE (addr) == MEM)
6782 addr
6783 = replace_equiv_address (addr,
6784 fix_lexical_addr (XEXP (addr, 0), exp));
6785 else
6786 addr = fix_lexical_addr (addr, exp);
6788 temp = replace_equiv_address (DECL_RTL (exp), addr);
6791 /* This is the case of an array whose size is to be determined
6792 from its initializer, while the initializer is still being parsed.
6793 See expand_decl. */
6795 else if (GET_CODE (DECL_RTL (exp)) == MEM
6796 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6797 temp = validize_mem (DECL_RTL (exp));
6799 /* If DECL_RTL is memory, we are in the normal case; if either
6800 the address is not valid, or it is not a register and -fforce-addr
6801 is specified, get the address into a register. */
6803 else if (GET_CODE (DECL_RTL (exp)) == MEM
6804 && modifier != EXPAND_CONST_ADDRESS
6805 && modifier != EXPAND_SUM
6806 && modifier != EXPAND_INITIALIZER
6807 && (! memory_address_p (DECL_MODE (exp),
6808 XEXP (DECL_RTL (exp), 0))
6809 || (flag_force_addr
6810 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6811 temp = replace_equiv_address (DECL_RTL (exp),
6812 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6814 /* If we got something, return it. But first, set the alignment
6815 if the address is a register. */
6816 if (temp != 0)
6818 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6819 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6821 return temp;
6824 /* If the mode of DECL_RTL does not match that of the decl, it
6825 must be a promoted value. We return a SUBREG of the wanted mode,
6826 but mark it so that we know that it was already extended. */
6828 if (GET_CODE (DECL_RTL (exp)) == REG
6829 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6831 /* Get the signedness used for this variable. Ensure we get the
6832 same mode we got when the variable was declared. */
6833 if (GET_MODE (DECL_RTL (exp))
6834 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6835 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6836 abort ();
6838 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6839 SUBREG_PROMOTED_VAR_P (temp) = 1;
6840 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6841 return temp;
6844 return DECL_RTL (exp);
6846 case INTEGER_CST:
6847 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6848 TREE_INT_CST_HIGH (exp), mode);
6850 /* ??? If overflow is set, fold will have done an incomplete job,
6851 which can result in (plus xx (const_int 0)), which can get
6852 simplified by validate_replace_rtx during virtual register
6853 instantiation, which can result in unrecognizable insns.
6854 Avoid this by forcing all overflows into registers. */
6855 if (TREE_CONSTANT_OVERFLOW (exp)
6856 && modifier != EXPAND_INITIALIZER)
6857 temp = force_reg (mode, temp);
6859 return temp;
6861 case VECTOR_CST:
6862 return const_vector_from_tree (exp);
6864 case CONST_DECL:
6865 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6867 case REAL_CST:
6868 /* If optimized, generate immediate CONST_DOUBLE
6869 which will be turned into memory by reload if necessary.
6871 We used to force a register so that loop.c could see it. But
6872 this does not allow gen_* patterns to perform optimizations with
6873 the constants. It also produces two insns in cases like "x = 1.0;".
6874 On most machines, floating-point constants are not permitted in
6875 many insns, so we'd end up copying it to a register in any case.
6877 Now, we do the copying in expand_binop, if appropriate. */
6878 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6879 TYPE_MODE (TREE_TYPE (exp)));
6881 case COMPLEX_CST:
6882 /* Handle evaluating a complex constant in a CONCAT target. */
6883 if (original_target && GET_CODE (original_target) == CONCAT)
6885 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6886 rtx rtarg, itarg;
6888 rtarg = XEXP (original_target, 0);
6889 itarg = XEXP (original_target, 1);
6891 /* Move the real and imaginary parts separately. */
6892 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6893 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6895 if (op0 != rtarg)
6896 emit_move_insn (rtarg, op0);
6897 if (op1 != itarg)
6898 emit_move_insn (itarg, op1);
6900 return original_target;
6903 /* ... fall through ... */
6905 case STRING_CST:
6906 temp = output_constant_def (exp, 1);
6908 /* temp contains a constant address.
6909 On RISC machines where a constant address isn't valid,
6910 make some insns to get that address into a register. */
6911 if (modifier != EXPAND_CONST_ADDRESS
6912 && modifier != EXPAND_INITIALIZER
6913 && modifier != EXPAND_SUM
6914 && (! memory_address_p (mode, XEXP (temp, 0))
6915 || flag_force_addr))
6916 return replace_equiv_address (temp,
6917 copy_rtx (XEXP (temp, 0)));
6918 return temp;
6920 case EXPR_WITH_FILE_LOCATION:
6922 rtx to_return;
6923 location_t saved_loc = input_location;
6924 input_filename = EXPR_WFL_FILENAME (exp);
6925 input_line = EXPR_WFL_LINENO (exp);
6926 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6927 emit_line_note (input_location);
6928 /* Possibly avoid switching back and forth here. */
6929 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6930 input_location = saved_loc;
6931 return to_return;
6934 case SAVE_EXPR:
6935 context = decl_function_context (exp);
6937 /* If this SAVE_EXPR was at global context, assume we are an
6938 initialization function and move it into our context. */
6939 if (context == 0)
6940 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6942 /* We treat inline_function_decl as an alias for the current function
6943 because that is the inline function whose vars, types, etc.
6944 are being merged into the current function.
6945 See expand_inline_function. */
6946 if (context == current_function_decl || context == inline_function_decl)
6947 context = 0;
6949 /* If this is non-local, handle it. */
6950 if (context)
6952 /* The following call just exists to abort if the context is
6953 not of a containing function. */
6954 find_function_data (context);
6956 temp = SAVE_EXPR_RTL (exp);
6957 if (temp && GET_CODE (temp) == REG)
6959 put_var_into_stack (exp, /*rescan=*/true);
6960 temp = SAVE_EXPR_RTL (exp);
6962 if (temp == 0 || GET_CODE (temp) != MEM)
6963 abort ();
6964 return
6965 replace_equiv_address (temp,
6966 fix_lexical_addr (XEXP (temp, 0), exp));
6968 if (SAVE_EXPR_RTL (exp) == 0)
6970 if (mode == VOIDmode)
6971 temp = const0_rtx;
6972 else
6973 temp = assign_temp (build_qualified_type (type,
6974 (TYPE_QUALS (type)
6975 | TYPE_QUAL_CONST)),
6976 3, 0, 0);
6978 SAVE_EXPR_RTL (exp) = temp;
6979 if (!optimize && GET_CODE (temp) == REG)
6980 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6981 save_expr_regs);
6983 /* If the mode of TEMP does not match that of the expression, it
6984 must be a promoted value. We pass store_expr a SUBREG of the
6985 wanted mode but mark it so that we know that it was already
6986 extended. */
6988 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6990 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6991 promote_mode (type, mode, &unsignedp, 0);
6992 SUBREG_PROMOTED_VAR_P (temp) = 1;
6993 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6996 if (temp == const0_rtx)
6997 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6998 else
6999 store_expr (TREE_OPERAND (exp, 0), temp,
7000 modifier == EXPAND_STACK_PARM ? 2 : 0);
7002 TREE_USED (exp) = 1;
7005 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7006 must be a promoted value. We return a SUBREG of the wanted mode,
7007 but mark it so that we know that it was already extended. */
7009 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7010 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7012 /* Compute the signedness and make the proper SUBREG. */
7013 promote_mode (type, mode, &unsignedp, 0);
7014 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7015 SUBREG_PROMOTED_VAR_P (temp) = 1;
7016 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7017 return temp;
7020 return SAVE_EXPR_RTL (exp);
7022 case UNSAVE_EXPR:
7024 rtx temp;
7025 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7026 TREE_OPERAND (exp, 0)
7027 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7028 return temp;
7031 case PLACEHOLDER_EXPR:
7033 tree old_list = placeholder_list;
7034 tree placeholder_expr = 0;
7036 exp = find_placeholder (exp, &placeholder_expr);
7037 if (exp == 0)
7038 abort ();
7040 placeholder_list = TREE_CHAIN (placeholder_expr);
7041 temp = expand_expr (exp, original_target, tmode, modifier);
7042 placeholder_list = old_list;
7043 return temp;
7046 case WITH_RECORD_EXPR:
7047 /* Put the object on the placeholder list, expand our first operand,
7048 and pop the list. */
7049 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7050 placeholder_list);
7051 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7052 modifier);
7053 placeholder_list = TREE_CHAIN (placeholder_list);
7054 return target;
7056 case GOTO_EXPR:
7057 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7058 expand_goto (TREE_OPERAND (exp, 0));
7059 else
7060 expand_computed_goto (TREE_OPERAND (exp, 0));
7061 return const0_rtx;
7063 case EXIT_EXPR:
7064 expand_exit_loop_if_false (NULL,
7065 invert_truthvalue (TREE_OPERAND (exp, 0)));
7066 return const0_rtx;
7068 case LABELED_BLOCK_EXPR:
7069 if (LABELED_BLOCK_BODY (exp))
7070 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7071 /* Should perhaps use expand_label, but this is simpler and safer. */
7072 do_pending_stack_adjust ();
7073 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7074 return const0_rtx;
7076 case EXIT_BLOCK_EXPR:
7077 if (EXIT_BLOCK_RETURN (exp))
7078 sorry ("returned value in block_exit_expr");
7079 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7080 return const0_rtx;
7082 case LOOP_EXPR:
7083 push_temp_slots ();
7084 expand_start_loop (1);
7085 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7086 expand_end_loop ();
7087 pop_temp_slots ();
7089 return const0_rtx;
7091 case BIND_EXPR:
7093 tree vars = TREE_OPERAND (exp, 0);
7095 /* Need to open a binding contour here because
7096 if there are any cleanups they must be contained here. */
7097 expand_start_bindings (2);
7099 /* Mark the corresponding BLOCK for output in its proper place. */
7100 if (TREE_OPERAND (exp, 2) != 0
7101 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7102 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7104 /* If VARS have not yet been expanded, expand them now. */
7105 while (vars)
7107 if (!DECL_RTL_SET_P (vars))
7108 expand_decl (vars);
7109 expand_decl_init (vars);
7110 vars = TREE_CHAIN (vars);
7113 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7115 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7117 return temp;
7120 case RTL_EXPR:
7121 if (RTL_EXPR_SEQUENCE (exp))
7123 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7124 abort ();
7125 emit_insn (RTL_EXPR_SEQUENCE (exp));
7126 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7128 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7129 free_temps_for_rtl_expr (exp);
7130 return RTL_EXPR_RTL (exp);
7132 case CONSTRUCTOR:
7133 /* If we don't need the result, just ensure we evaluate any
7134 subexpressions. */
7135 if (ignore)
7137 tree elt;
7139 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7140 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7142 return const0_rtx;
7145 /* All elts simple constants => refer to a constant in memory. But
7146 if this is a non-BLKmode mode, let it store a field at a time
7147 since that should make a CONST_INT or CONST_DOUBLE when we
7148 fold. Likewise, if we have a target we can use, it is best to
7149 store directly into the target unless the type is large enough
7150 that memcpy will be used. If we are making an initializer and
7151 all operands are constant, put it in memory as well.
7153 FIXME: Avoid trying to fill vector constructors piece-meal.
7154 Output them with output_constant_def below unless we're sure
7155 they're zeros. This should go away when vector initializers
7156 are treated like VECTOR_CST instead of arrays.
7157 */
7158 else if ((TREE_STATIC (exp)
7159 && ((mode == BLKmode
7160 && ! (target != 0 && safe_from_p (target, exp, 1)))
7161 || TREE_ADDRESSABLE (exp)
7162 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7163 && (! MOVE_BY_PIECES_P
7164 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7165 TYPE_ALIGN (type)))
7166 && ((TREE_CODE (type) == VECTOR_TYPE
7167 && !is_zeros_p (exp))
7168 || ! mostly_zeros_p (exp)))))
7169 || ((modifier == EXPAND_INITIALIZER
7170 || modifier == EXPAND_CONST_ADDRESS)
7171 && TREE_CONSTANT (exp)))
7173 rtx constructor = output_constant_def (exp, 1);
7175 if (modifier != EXPAND_CONST_ADDRESS
7176 && modifier != EXPAND_INITIALIZER
7177 && modifier != EXPAND_SUM)
7178 constructor = validize_mem (constructor);
7180 return constructor;
7182 else
7184 /* Handle calls that pass values in multiple non-contiguous
7185 locations. The Irix 6 ABI has examples of this. */
7186 if (target == 0 || ! safe_from_p (target, exp, 1)
7187 || GET_CODE (target) == PARALLEL
7188 || modifier == EXPAND_STACK_PARM)
7189 target
7190 = assign_temp (build_qualified_type (type,
7191 (TYPE_QUALS (type)
7192 | (TREE_READONLY (exp)
7193 * TYPE_QUAL_CONST))),
7194 0, TREE_ADDRESSABLE (exp), 1);
7196 store_constructor (exp, target, 0, int_expr_size (exp));
7197 return target;
7200 case INDIRECT_REF:
7202 tree exp1 = TREE_OPERAND (exp, 0);
7203 tree index;
7204 tree string = string_constant (exp1, &index);
7206 /* Try to optimize reads from const strings. */
7207 if (string
7208 && TREE_CODE (string) == STRING_CST
7209 && TREE_CODE (index) == INTEGER_CST
7210 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7211 && GET_MODE_CLASS (mode) == MODE_INT
7212 && GET_MODE_SIZE (mode) == 1
7213 && modifier != EXPAND_WRITE)
7214 return gen_int_mode (TREE_STRING_POINTER (string)
7215 [TREE_INT_CST_LOW (index)], mode);
7217 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7218 op0 = memory_address (mode, op0);
7219 temp = gen_rtx_MEM (mode, op0);
7220 set_mem_attributes (temp, exp, 0);
7222 /* If we are writing to this object and its type is a record with
7223 readonly fields, we must mark it as readonly so it will
7224 conflict with readonly references to those fields. */
7225 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7226 RTX_UNCHANGING_P (temp) = 1;
7228 return temp;
7231 case ARRAY_REF:
7232 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7233 abort ();
7236 tree array = TREE_OPERAND (exp, 0);
7237 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7238 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7239 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7240 HOST_WIDE_INT i;
7242 /* Optimize the special-case of a zero lower bound.
7244 We convert the low_bound to sizetype to avoid some problems
7245 with constant folding. (E.g. suppose the lower bound is 1,
7246 and its mode is QI. Without the conversion, (ARRAY
7247 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7248 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7250 if (! integer_zerop (low_bound))
7251 index = size_diffop (index, convert (sizetype, low_bound));
7253 /* Fold an expression like: "foo"[2].
7254 This is not done in fold so it won't happen inside &.
7255 Don't fold if this is for wide characters since it's too
7256 difficult to do correctly and this is a very rare case. */
7258 if (modifier != EXPAND_CONST_ADDRESS
7259 && modifier != EXPAND_INITIALIZER
7260 && modifier != EXPAND_MEMORY
7261 && TREE_CODE (array) == STRING_CST
7262 && TREE_CODE (index) == INTEGER_CST
7263 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7264 && GET_MODE_CLASS (mode) == MODE_INT
7265 && GET_MODE_SIZE (mode) == 1)
7266 return gen_int_mode (TREE_STRING_POINTER (array)
7267 [TREE_INT_CST_LOW (index)], mode);
7269 /* If this is a constant index into a constant array,
7270 just get the value from the array. Handle both the cases when
7271 we have an explicit constructor and when our operand is a variable
7272 that was declared const. */
7274 if (modifier != EXPAND_CONST_ADDRESS
7275 && modifier != EXPAND_INITIALIZER
7276 && modifier != EXPAND_MEMORY
7277 && TREE_CODE (array) == CONSTRUCTOR
7278 && ! TREE_SIDE_EFFECTS (array)
7279 && TREE_CODE (index) == INTEGER_CST
7280 && 0 > compare_tree_int (index,
7281 list_length (CONSTRUCTOR_ELTS
7282 (TREE_OPERAND (exp, 0)))))
7284 tree elem;
7286 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7287 i = TREE_INT_CST_LOW (index);
7288 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7291 if (elem)
7292 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7293 modifier);
7296 else if (optimize >= 1
7297 && modifier != EXPAND_CONST_ADDRESS
7298 && modifier != EXPAND_INITIALIZER
7299 && modifier != EXPAND_MEMORY
7300 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7301 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7302 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7304 if (TREE_CODE (index) == INTEGER_CST)
7306 tree init = DECL_INITIAL (array);
7308 if (TREE_CODE (init) == CONSTRUCTOR)
7310 tree elem;
7312 for (elem = CONSTRUCTOR_ELTS (init);
7313 (elem
7314 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7315 elem = TREE_CHAIN (elem))
7318 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7319 return expand_expr (fold (TREE_VALUE (elem)), target,
7320 tmode, modifier);
7322 else if (TREE_CODE (init) == STRING_CST
7323 && 0 > compare_tree_int (index,
7324 TREE_STRING_LENGTH (init)))
7326 tree type = TREE_TYPE (TREE_TYPE (init));
7327 enum machine_mode mode = TYPE_MODE (type);
7329 if (GET_MODE_CLASS (mode) == MODE_INT
7330 && GET_MODE_SIZE (mode) == 1)
7331 return gen_int_mode (TREE_STRING_POINTER (init)
7332 [TREE_INT_CST_LOW (index)], mode);
7337 goto normal_inner_ref;
7339 case COMPONENT_REF:
7340 /* If the operand is a CONSTRUCTOR, we can just extract the
7341 appropriate field if it is present. */
7342 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7344 tree elt;
7346 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7347 elt = TREE_CHAIN (elt))
7348 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7349 /* We can normally use the value of the field in the
7350 CONSTRUCTOR. However, if this is a bitfield in
7351 an integral mode that we can fit in a HOST_WIDE_INT,
7352 we must mask only the number of bits in the bitfield,
7353 since this is done implicitly by the constructor. If
7354 the bitfield does not meet either of those conditions,
7355 we can't do this optimization. */
7356 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7357 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7358 == MODE_INT)
7359 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7360 <= HOST_BITS_PER_WIDE_INT))))
7362 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7363 && modifier == EXPAND_STACK_PARM)
7364 target = 0;
7365 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7366 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7368 HOST_WIDE_INT bitsize
7369 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7370 enum machine_mode imode
7371 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7373 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7375 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7376 op0 = expand_and (imode, op0, op1, target);
7378 else
7380 tree count
7381 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7384 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7385 target, 0);
7386 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7387 target, 0);
7391 return op0;
7394 goto normal_inner_ref;
7396 case BIT_FIELD_REF:
7397 case ARRAY_RANGE_REF:
7398 normal_inner_ref:
7400 enum machine_mode mode1;
7401 HOST_WIDE_INT bitsize, bitpos;
7402 tree offset;
7403 int volatilep = 0;
7404 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7405 &mode1, &unsignedp, &volatilep);
7406 rtx orig_op0;
7408 /* If we got back the original object, something is wrong. Perhaps
7409 we are evaluating an expression too early. In any event, don't
7410 infinitely recurse. */
7411 if (tem == exp)
7412 abort ();
7414 /* If TEM's type is a union of variable size, pass TARGET to the inner
7415 computation, since it will need a temporary and TARGET is known
7416 to suffice. This occurs in unchecked conversion in Ada. */
7418 orig_op0 = op0
7419 = expand_expr (tem,
7420 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7421 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7422 != INTEGER_CST)
7423 && modifier != EXPAND_STACK_PARM
7424 ? target : NULL_RTX),
7425 VOIDmode,
7426 (modifier == EXPAND_INITIALIZER
7427 || modifier == EXPAND_CONST_ADDRESS
7428 || modifier == EXPAND_STACK_PARM)
7429 ? modifier : EXPAND_NORMAL);
7431 /* If this is a constant, put it into a register if it is a
7432 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7433 if (CONSTANT_P (op0))
7435 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7436 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7437 && offset == 0)
7438 op0 = force_reg (mode, op0);
7439 else
7440 op0 = validize_mem (force_const_mem (mode, op0));
7443 if (offset != 0)
7445 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7446 EXPAND_SUM);
7448 /* If this object is in a register, put it into memory.
7449 This case can't occur in C, but can in Ada if we have
7450 unchecked conversion of an expression from a scalar type to
7451 an array or record type. */
7452 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7453 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7455 /* If the operand is a SAVE_EXPR, we can deal with this by
7456 forcing the SAVE_EXPR into memory. */
7457 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7459 put_var_into_stack (TREE_OPERAND (exp, 0),
7460 /*rescan=*/true);
7461 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7463 else
7465 tree nt
7466 = build_qualified_type (TREE_TYPE (tem),
7467 (TYPE_QUALS (TREE_TYPE (tem))
7468 | TYPE_QUAL_CONST));
7469 rtx memloc = assign_temp (nt, 1, 1, 1);
7471 emit_move_insn (memloc, op0);
7472 op0 = memloc;
7476 if (GET_CODE (op0) != MEM)
7477 abort ();
7479 #ifdef POINTERS_EXTEND_UNSIGNED
7480 if (GET_MODE (offset_rtx) != Pmode)
7481 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7482 #else
7483 if (GET_MODE (offset_rtx) != ptr_mode)
7484 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7485 #endif
7487 /* A constant address in OP0 can have VOIDmode; we must not try
7488 to call force_reg in that case, so avoid it. */
7489 if (GET_CODE (op0) == MEM
7490 && GET_MODE (op0) == BLKmode
7491 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7492 && bitsize != 0
7493 && (bitpos % bitsize) == 0
7494 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7495 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7497 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7498 bitpos = 0;
7501 op0 = offset_address (op0, offset_rtx,
7502 highest_pow2_factor (offset));
7505 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7506 record its alignment as BIGGEST_ALIGNMENT. */
7507 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7508 && is_aligning_offset (offset, tem))
7509 set_mem_align (op0, BIGGEST_ALIGNMENT);
7511 /* Don't forget about volatility even if this is a bitfield. */
7512 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7514 if (op0 == orig_op0)
7515 op0 = copy_rtx (op0);
7517 MEM_VOLATILE_P (op0) = 1;
7520 /* The following code doesn't handle CONCAT.
7521 Assume only bitpos == 0 can be used for CONCAT, due to
7522 one-element arrays having the same mode as their element. */
7523 if (GET_CODE (op0) == CONCAT)
7525 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7526 abort ();
7527 return op0;
7530 /* In cases where an aligned union has an unaligned object
7531 as a field, we might be extracting a BLKmode value from
7532 an integer-mode (e.g., SImode) object. Handle this case
7533 by doing the extract into an object as wide as the field
7534 (which we know to be the width of a basic mode), then
7535 storing into memory, and changing the mode to BLKmode. */
7536 if (mode1 == VOIDmode
7537 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7538 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7539 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7540 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7541 && modifier != EXPAND_CONST_ADDRESS
7542 && modifier != EXPAND_INITIALIZER)
7543 /* If the field isn't aligned enough to fetch as a memref,
7544 fetch it as a bit field. */
7545 || (mode1 != BLKmode
7546 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7547 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7548 && ((modifier == EXPAND_CONST_ADDRESS
7549 || modifier == EXPAND_INITIALIZER)
7550 ? STRICT_ALIGNMENT
7551 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7552 || (bitpos % BITS_PER_UNIT != 0)))
7553 /* If the type and the field are a constant size and the
7554 size of the type isn't the same size as the bitfield,
7555 we must use bitfield operations. */
7556 || (bitsize >= 0
7557 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7558 == INTEGER_CST)
7559 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7560 bitsize)))
7562 enum machine_mode ext_mode = mode;
7564 if (ext_mode == BLKmode
7565 && ! (target != 0 && GET_CODE (op0) == MEM
7566 && GET_CODE (target) == MEM
7567 && bitpos % BITS_PER_UNIT == 0))
7568 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7570 if (ext_mode == BLKmode)
7572 /* In this case, BITPOS must start at a byte boundary and
7573 TARGET, if specified, must be a MEM. */
7574 if (GET_CODE (op0) != MEM
7575 || (target != 0 && GET_CODE (target) != MEM)
7576 || bitpos % BITS_PER_UNIT != 0)
7577 abort ();
7579 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7580 if (target == 0)
7581 target = assign_temp (type, 0, 1, 1);
7583 emit_block_move (target, op0,
7584 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7585 / BITS_PER_UNIT),
7586 (modifier == EXPAND_STACK_PARM
7587 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7589 return target;
7592 op0 = validize_mem (op0);
7594 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7595 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7597 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7598 (modifier == EXPAND_STACK_PARM
7599 ? NULL_RTX : target),
7600 ext_mode, ext_mode,
7601 int_size_in_bytes (TREE_TYPE (tem)));
7603 /* If the result is a record type and BITSIZE is narrower than
7604 the mode of OP0, an integral mode, and this is a big endian
7605 machine, we must put the field into the high-order bits. */
7606 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7607 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7608 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7609 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7610 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7611 - bitsize),
7612 op0, 1);
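	    /* Illustrative example: if OP0 is SImode on a 32-bit big-endian
	       target and BITSIZE is 5, the extracted field is shifted left
	       by 27 so that it sits in the high-order bits, which is where
	       the in-memory record layout expects it.  */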
7614 if (mode == BLKmode)
7616 rtx new = assign_temp (build_qualified_type
7617 ((*lang_hooks.types.type_for_mode)
7618 (ext_mode, 0),
7619 TYPE_QUAL_CONST), 0, 1, 1);
7621 emit_move_insn (new, op0);
7622 op0 = copy_rtx (new);
7623 PUT_MODE (op0, BLKmode);
7624 set_mem_attributes (op0, exp, 1);
7627 return op0;
7630 /* If the result is BLKmode, use that to access the object
7631 now as well. */
7632 if (mode == BLKmode)
7633 mode1 = BLKmode;
7635 /* Get a reference to just this component. */
7636 if (modifier == EXPAND_CONST_ADDRESS
7637 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7638 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7639 else
7640 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7642 if (op0 == orig_op0)
7643 op0 = copy_rtx (op0);
7645 set_mem_attributes (op0, exp, 0);
7646 if (GET_CODE (XEXP (op0, 0)) == REG)
7647 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7649 MEM_VOLATILE_P (op0) |= volatilep;
7650 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7651 || modifier == EXPAND_CONST_ADDRESS
7652 || modifier == EXPAND_INITIALIZER)
7653 return op0;
7654 else if (target == 0)
7655 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7657 convert_move (target, op0, unsignedp);
7658 return target;
7661 case VTABLE_REF:
7663 rtx insn, before = get_last_insn (), vtbl_ref;
7665 /* Evaluate the interior expression. */
7666 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7667 tmode, modifier);
7669 /* Get or create an instruction off which to hang a note. */
7670 if (REG_P (subtarget))
7672 target = subtarget;
7673 insn = get_last_insn ();
7674 if (insn == before)
7675 abort ();
7676 if (! INSN_P (insn))
7677 insn = prev_nonnote_insn (insn);
7679 else
7681 target = gen_reg_rtx (GET_MODE (subtarget));
7682 insn = emit_move_insn (target, subtarget);
7685 /* Collect the data for the note. */
7686 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7687 vtbl_ref = plus_constant (vtbl_ref,
7688 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7689 /* Discard the initial CONST that was added. */
7690 vtbl_ref = XEXP (vtbl_ref, 0);
7692 REG_NOTES (insn)
7693 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7695 return target;
7698 /* Intended for a reference to a buffer of a file-object in Pascal.
7699 But it's not certain that a special tree code will really be
7700 necessary for these. INDIRECT_REF might work for them. */
7701 case BUFFER_REF:
7702 abort ();
7704 case IN_EXPR:
7706 /* Pascal set IN expression.
7708 Algorithm:
7709 rlo = set_low - (set_low%bits_per_word);
7710 the_word = set [ (index - rlo)/bits_per_word ];
7711 bit_index = index % bits_per_word;
7712 bitmask = 1 << bit_index;
7713 return !!(the_word & bitmask); */
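	/* Illustrative reading of the algorithm above, assuming eight-bit
	   units (the code below works in BITS_PER_UNIT-sized chunks) and a
	   set whose low bound is 0: for "13 in s", the byte of the set
	   holding the bit is byte 13 / 8 == 1, the bit within that byte is
	   13 % 8 == 5, and the result is that bit masked out and reduced
	   to 0 or 1.  */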
7715 tree set = TREE_OPERAND (exp, 0);
7716 tree index = TREE_OPERAND (exp, 1);
7717 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7718 tree set_type = TREE_TYPE (set);
7719 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7720 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7721 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7722 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7723 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7724 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7725 rtx setaddr = XEXP (setval, 0);
7726 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7727 rtx rlow;
7728 rtx diff, quo, rem, addr, bit, result;
7730 /* If domain is empty, answer is no. Likewise if index is constant
7731 and out of bounds. */
7732 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7733 && TREE_CODE (set_low_bound) == INTEGER_CST
7734 && tree_int_cst_lt (set_high_bound, set_low_bound))
7735 || (TREE_CODE (index) == INTEGER_CST
7736 && TREE_CODE (set_low_bound) == INTEGER_CST
7737 && tree_int_cst_lt (index, set_low_bound))
7738 || (TREE_CODE (set_high_bound) == INTEGER_CST
7739 && TREE_CODE (index) == INTEGER_CST
7740 && tree_int_cst_lt (set_high_bound, index))))
7741 return const0_rtx;
7743 if (target == 0)
7744 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7746 /* If we get here, we have to generate the code for both cases
7747 (in range and out of range). */
7749 op0 = gen_label_rtx ();
7750 op1 = gen_label_rtx ();
7752 if (! (GET_CODE (index_val) == CONST_INT
7753 && GET_CODE (lo_r) == CONST_INT))
7754 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7755 GET_MODE (index_val), iunsignedp, op1);
7757 if (! (GET_CODE (index_val) == CONST_INT
7758 && GET_CODE (hi_r) == CONST_INT))
7759 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7760 GET_MODE (index_val), iunsignedp, op1);
7762 /* Calculate the element number of bit zero in the first word
7763 of the set. */
7764 if (GET_CODE (lo_r) == CONST_INT)
7765 rlow = GEN_INT (INTVAL (lo_r)
7766 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7767 else
7768 rlow = expand_binop (index_mode, and_optab, lo_r,
7769 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7770 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7772 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7773 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7775 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7776 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7777 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7778 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7780 addr = memory_address (byte_mode,
7781 expand_binop (index_mode, add_optab, diff,
7782 setaddr, NULL_RTX, iunsignedp,
7783 OPTAB_LIB_WIDEN));
7785 /* Extract the bit we want to examine. */
7786 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7787 gen_rtx_MEM (byte_mode, addr),
7788 make_tree (TREE_TYPE (index), rem),
7789 NULL_RTX, 1);
7790 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7791 GET_MODE (target) == byte_mode ? target : 0,
7792 1, OPTAB_LIB_WIDEN);
7794 if (result != target)
7795 convert_move (target, result, 1);
7797 /* Output the code to handle the out-of-range case. */
7798 emit_jump (op0);
7799 emit_label (op1);
7800 emit_move_insn (target, const0_rtx);
7801 emit_label (op0);
7802 return target;
7805 case WITH_CLEANUP_EXPR:
7806 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7808 WITH_CLEANUP_EXPR_RTL (exp)
7809 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7810 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7811 CLEANUP_EH_ONLY (exp));
7813 /* That's it for this cleanup. */
7814 TREE_OPERAND (exp, 1) = 0;
7816 return WITH_CLEANUP_EXPR_RTL (exp);
7818 case CLEANUP_POINT_EXPR:
7820 /* Start a new binding layer that will keep track of all cleanup
7821 actions to be performed. */
7822 expand_start_bindings (2);
7824 target_temp_slot_level = temp_slot_level;
7826 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7827 /* If we're going to use this value, load it up now. */
7828 if (! ignore)
7829 op0 = force_not_mem (op0);
7830 preserve_temp_slots (op0);
7831 expand_end_bindings (NULL_TREE, 0, 0);
7833 return op0;
7835 case CALL_EXPR:
7836 /* Check for a built-in function. */
7837 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7838 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7839 == FUNCTION_DECL)
7840 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7842 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7843 == BUILT_IN_FRONTEND)
7844 return (*lang_hooks.expand_expr) (exp, original_target,
7845 tmode, modifier);
7846 else
7847 return expand_builtin (exp, target, subtarget, tmode, ignore);
7850 return expand_call (exp, target, ignore);
7852 case NON_LVALUE_EXPR:
7853 case NOP_EXPR:
7854 case CONVERT_EXPR:
7855 case REFERENCE_EXPR:
7856 if (TREE_OPERAND (exp, 0) == error_mark_node)
7857 return const0_rtx;
7859 if (TREE_CODE (type) == UNION_TYPE)
7861 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7863 /* If both input and output are BLKmode, this conversion isn't doing
7864 anything except possibly changing memory attributes. */
7865 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7867 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7868 modifier);
7870 result = copy_rtx (result);
7871 set_mem_attributes (result, exp, 0);
7872 return result;
7875 if (target == 0)
7876 target = assign_temp (type, 0, 1, 1);
7878 if (GET_CODE (target) == MEM)
7879 /* Store data into beginning of memory target. */
7880 store_expr (TREE_OPERAND (exp, 0),
7881 adjust_address (target, TYPE_MODE (valtype), 0),
7882 modifier == EXPAND_STACK_PARM ? 2 : 0);
7884 else if (GET_CODE (target) == REG)
7885 /* Store this field into a union of the proper type. */
7886 store_field (target,
7887 MIN ((int_size_in_bytes (TREE_TYPE
7888 (TREE_OPERAND (exp, 0)))
7889 * BITS_PER_UNIT),
7890 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7891 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7892 VOIDmode, 0, type, 0);
7893 else
7894 abort ();
7896 /* Return the entire union. */
7897 return target;
7900 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7902 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7903 modifier);
7905 /* If the signedness of the conversion differs and OP0 is
7906 a promoted SUBREG, clear that indication since we now
7907 have to do the proper extension. */
7908 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7909 && GET_CODE (op0) == SUBREG)
7910 SUBREG_PROMOTED_VAR_P (op0) = 0;
7912 return op0;
7915 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7916 if (GET_MODE (op0) == mode)
7917 return op0;
7919 /* If OP0 is a constant, just convert it into the proper mode. */
7920 if (CONSTANT_P (op0))
7922 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7923 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7925 if (modifier == EXPAND_INITIALIZER)
7926 return simplify_gen_subreg (mode, op0, inner_mode,
7927 subreg_lowpart_offset (mode,
7928 inner_mode));
7929 else
7930 return convert_modes (mode, inner_mode, op0,
7931 TREE_UNSIGNED (inner_type));
7934 if (modifier == EXPAND_INITIALIZER)
7935 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7937 if (target == 0)
7938 return
7939 convert_to_mode (mode, op0,
7940 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7941 else
7942 convert_move (target, op0,
7943 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7944 return target;
7946 case VIEW_CONVERT_EXPR:
7947 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7949 /* If the input and output modes are both the same, we are done.
7950 Otherwise, if neither mode is BLKmode and both are integral and within
7951 a word, we can use gen_lowpart. If neither is true, make sure the
7952 operand is in memory and convert the MEM to the new mode. */
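      /* Illustrative example: reinterpreting an SImode integer as an
	 SFmode float cannot use gen_lowpart here, since SFmode is not
	 MODE_INT; the value instead goes through memory and the MEM is
	 then re-viewed in the new mode.  */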
7953 if (TYPE_MODE (type) == GET_MODE (op0))
7955 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7956 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7957 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7958 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7959 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7960 op0 = gen_lowpart (TYPE_MODE (type), op0);
7961 else if (GET_CODE (op0) != MEM)
7963 /* If the operand is not a MEM, force it into memory. Since we
7964 are going to be changing the mode of the MEM, don't call
7965 force_const_mem for constants because we don't allow pool
7966 constants to change mode. */
7967 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7969 if (TREE_ADDRESSABLE (exp))
7970 abort ();
7972 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7973 target
7974 = assign_stack_temp_for_type
7975 (TYPE_MODE (inner_type),
7976 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7978 emit_move_insn (target, op0);
7979 op0 = target;
7982 /* At this point, OP0 is in the correct mode. If the output type is such
7983 that the operand is known to be aligned, indicate that it is.
7984 Otherwise, we need only be concerned about alignment for non-BLKmode
7985 results. */
7986 if (GET_CODE (op0) == MEM)
7988 op0 = copy_rtx (op0);
7990 if (TYPE_ALIGN_OK (type))
7991 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7992 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7993 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7995 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7996 HOST_WIDE_INT temp_size
7997 = MAX (int_size_in_bytes (inner_type),
7998 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7999 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8000 temp_size, 0, type);
8001 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8003 if (TREE_ADDRESSABLE (exp))
8004 abort ();
8006 if (GET_MODE (op0) == BLKmode)
8007 emit_block_move (new_with_op0_mode, op0,
8008 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8009 (modifier == EXPAND_STACK_PARM
8010 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8011 else
8012 emit_move_insn (new_with_op0_mode, op0);
8014 op0 = new;
8017 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8020 return op0;
8022 case PLUS_EXPR:
8023 this_optab = ! unsignedp && flag_trapv
8024 && (GET_MODE_CLASS (mode) == MODE_INT)
8025 ? addv_optab : add_optab;
8027 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8028 something else, make sure we add the register to the constant and
8029 then to the other thing. This case can occur during strength
8030 reduction and doing it this way will produce better code if the
8031 frame pointer or argument pointer is eliminated.
8033 fold-const.c will ensure that the constant is always in the inner
8034 PLUS_EXPR, so the only case we need to do anything about is if
8035 sp, ap, or fp is our second argument, in which case we must swap
8036 the innermost first argument and our second argument. */
8038 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8039 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8040 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8041 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8042 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8043 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8045 tree t = TREE_OPERAND (exp, 1);
8047 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8048 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
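	  /* Illustrative example: (x + 4) + fp is rearranged here into
	     (fp + 4) + x, keeping the register and the constant together
	     so that better code results when the frame or argument
	     pointer is eliminated.  */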
8051 /* If the result is to be ptr_mode and we are adding an integer to
8052 something, we might be forming a constant. So try to use
8053 plus_constant. If it produces a sum and we can't accept it,
8054 use force_operand. This allows P = &ARR[const] to generate
8055 efficient code on machines where a SYMBOL_REF is not a valid
8056 address.
8058 If this is an EXPAND_SUM call, always return the sum. */
8059 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8060 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8062 if (modifier == EXPAND_STACK_PARM)
8063 target = 0;
8064 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8065 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8066 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8068 rtx constant_part;
8070 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8071 EXPAND_SUM);
8072 /* Use immed_double_const to ensure that the constant is
8073 truncated according to the mode of OP1, then sign extended
8074 to a HOST_WIDE_INT. Using the constant directly can result
8075 in non-canonical RTL in a 64x32 cross compile. */
8076 constant_part
8077 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8078 (HOST_WIDE_INT) 0,
8079 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8080 op1 = plus_constant (op1, INTVAL (constant_part));
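	    /* Illustrative example: for 8 + (long) &x, OP1 expands to the
	       SYMBOL_REF for x and plus_constant folds the 8 into it,
	       yielding a single constant address expression instead of an
	       explicit addition.  */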
8081 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8082 op1 = force_operand (op1, target);
8083 return op1;
8086 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8087 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8088 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8090 rtx constant_part;
8092 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8093 (modifier == EXPAND_INITIALIZER
8094 ? EXPAND_INITIALIZER : EXPAND_SUM));
8095 if (! CONSTANT_P (op0))
8097 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8098 VOIDmode, modifier);
8099 /* Don't go to both_summands if modifier
8100 says it's not right to return a PLUS. */
8101 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8102 goto binop2;
8103 goto both_summands;
8105 /* Use immed_double_const to ensure that the constant is
8106 truncated according to the mode of OP1, then sign extended
8107 to a HOST_WIDE_INT. Using the constant directly can result
8108 in non-canonical RTL in a 64x32 cross compile. */
8109 constant_part
8110 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8111 (HOST_WIDE_INT) 0,
8112 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8113 op0 = plus_constant (op0, INTVAL (constant_part));
8114 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8115 op0 = force_operand (op0, target);
8116 return op0;
8120 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8121 subtarget = 0;
8123 /* No sense saving up arithmetic to be done
8124 if it's all in the wrong mode to form part of an address.
8125 And force_operand won't know whether to sign-extend or
8126 zero-extend. */
8127 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8128 || mode != ptr_mode)
8130 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8131 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8132 TREE_OPERAND (exp, 1), 0))
8133 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8134 else
8135 op1 = op0;
8136 if (op0 == const0_rtx)
8137 return op1;
8138 if (op1 == const0_rtx)
8139 return op0;
8140 goto binop2;
8143 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8144 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8145 TREE_OPERAND (exp, 1), 0))
8146 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8147 VOIDmode, modifier);
8148 else
8149 op1 = op0;
8151 /* We come here from MINUS_EXPR when the second operand is a
8152 constant. */
8153 both_summands:
8154 /* Make sure any term that's a sum with a constant comes last. */
8155 if (GET_CODE (op0) == PLUS
8156 && CONSTANT_P (XEXP (op0, 1)))
8158 temp = op0;
8159 op0 = op1;
8160 op1 = temp;
8162 /* If adding to a sum including a constant,
8163 associate it to put the constant outside. */
8164 if (GET_CODE (op1) == PLUS
8165 && CONSTANT_P (XEXP (op1, 1)))
8167 rtx constant_term = const0_rtx;
8169 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8170 if (temp != 0)
8171 op0 = temp;
8172 /* Ensure that MULT comes first if there is one. */
8173 else if (GET_CODE (op0) == MULT)
8174 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8175 else
8176 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8178 /* Let's also eliminate constants from op0 if possible. */
8179 op0 = eliminate_constant_term (op0, &constant_term);
8181 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8182 their sum should be a constant. Form it into OP1, since the
8183 result we want will then be OP0 + OP1. */
8185 temp = simplify_binary_operation (PLUS, mode, constant_term,
8186 XEXP (op1, 1));
8187 if (temp != 0)
8188 op1 = temp;
8189 else
8190 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
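	/* Illustrative example: adding (plus (reg) (const_int 4)) to
	   (plus (symbol_ref) (const_int 8)) reassociates here so that the
	   two constants are gathered into one trailing term, roughly
	   (plus (plus (reg) (symbol_ref)) (const_int 12)).  */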
8193 /* Put a constant term last and put a multiplication first. */
8194 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8195 temp = op1, op1 = op0, op0 = temp;
8197 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8198 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8200 case MINUS_EXPR:
8201 /* For initializers, we are allowed to return a MINUS of two
8202 symbolic constants. Here we handle all cases when both operands
8203 are constant. */
8204 /* Handle difference of two symbolic constants,
8205 for the sake of an initializer. */
8206 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8207 && really_constant_p (TREE_OPERAND (exp, 0))
8208 && really_constant_p (TREE_OPERAND (exp, 1)))
8210 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8211 modifier);
8212 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8213 modifier);
8215 /* If the last operand is a CONST_INT, use plus_constant of
8216 the negated constant. Else make the MINUS. */
8217 if (GET_CODE (op1) == CONST_INT)
8218 return plus_constant (op0, - INTVAL (op1));
8219 else
8220 return gen_rtx_MINUS (mode, op0, op1);
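	  /* Illustrative example: a difference of two link-time addresses
	     in a static initializer can be emitted as
	     (minus (symbol_ref "b") (symbol_ref "a")) and left for the
	     assembler to resolve, instead of being computed at run time.  */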
8223 this_optab = ! unsignedp && flag_trapv
8224 && (GET_MODE_CLASS(mode) == MODE_INT)
8225 ? subv_optab : sub_optab;
8227 /* No sense saving up arithmetic to be done
8228 if it's all in the wrong mode to form part of an address.
8229 And force_operand won't know whether to sign-extend or
8230 zero-extend. */
8231 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8232 || mode != ptr_mode)
8233 goto binop;
8235 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8236 subtarget = 0;
8238 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8239 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8241 /* Convert A - const to A + (-const). */
8242 if (GET_CODE (op1) == CONST_INT)
8244 op1 = negate_rtx (mode, op1);
8245 goto both_summands;
8248 goto binop2;
8250 case MULT_EXPR:
8251 /* If first operand is constant, swap them.
8252 Thus the following special case checks need only
8253 check the second operand. */
8254 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8256 tree t1 = TREE_OPERAND (exp, 0);
8257 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8258 TREE_OPERAND (exp, 1) = t1;
8261 /* Attempt to return something suitable for generating an
8262 indexed address, for machines that support that. */
8264 if (modifier == EXPAND_SUM && mode == ptr_mode
8265 && host_integerp (TREE_OPERAND (exp, 1), 0))
8267 tree exp1 = TREE_OPERAND (exp, 1);
8269 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8270 EXPAND_SUM);
8272 /* If we knew for certain that this is arithmetic for an array
8273 reference, and we knew the bounds of the array, then we could
8274 apply the distributive law across (PLUS X C) for constant C.
8275 Without such knowledge, we risk overflowing the computation
8276 when both X and C are large, but X+C isn't. */
8277 /* ??? Could perhaps special-case EXP being unsigned and C being
8278 positive. In that case we are certain that X+C is no smaller
8279 than X and so the transformed expression will overflow iff the
8280 original would have. */
8282 if (GET_CODE (op0) != REG)
8283 op0 = force_operand (op0, NULL_RTX);
8284 if (GET_CODE (op0) != REG)
8285 op0 = copy_to_mode_reg (mode, op0);
8287 return gen_rtx_MULT (mode, op0,
8288 gen_int_mode (tree_low_cst (exp1, 0),
8289 TYPE_MODE (TREE_TYPE (exp1))));
8292 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8293 subtarget = 0;
8295 if (modifier == EXPAND_STACK_PARM)
8296 target = 0;
8298 /* Check for multiplying things that have been extended
8299 from a narrower type. If this machine supports multiplying
8300 in that narrower type with a result in the desired type,
8301 do it that way, and avoid the explicit type-conversion. */
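      /* Illustrative example: with 16-bit shorts and 32-bit ints, the
	 product (int) s1 * (int) s2 can use a single HImode-to-SImode
	 widening multiply when the target provides one, instead of
	 widening both operands and doing a full SImode multiply.  */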
8302 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8303 && TREE_CODE (type) == INTEGER_TYPE
8304 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8305 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8306 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8307 && int_fits_type_p (TREE_OPERAND (exp, 1),
8308 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8309 /* Don't use a widening multiply if a shift will do. */
8310 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8311 > HOST_BITS_PER_WIDE_INT)
8312 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8313 ||
8314 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8315 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8316 ==
8317 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8318 /* If both operands are extended, they must either both
8319 be zero-extended or both be sign-extended. */
8320 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8321 ==
8322 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8324 enum machine_mode innermode
8325 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8326 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8327 ? smul_widen_optab : umul_widen_optab);
8328 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8329 ? umul_widen_optab : smul_widen_optab);
8330 if (mode == GET_MODE_WIDER_MODE (innermode))
8332 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8334 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8335 NULL_RTX, VOIDmode, 0);
8336 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8337 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8338 VOIDmode, 0);
8339 else
8340 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8341 NULL_RTX, VOIDmode, 0);
8342 goto binop2;
8344 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8345 && innermode == word_mode)
8347 rtx htem;
8348 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8349 NULL_RTX, VOIDmode, 0);
8350 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8351 op1 = convert_modes (innermode, mode,
8352 expand_expr (TREE_OPERAND (exp, 1),
8353 NULL_RTX, VOIDmode, 0),
8354 unsignedp);
8355 else
8356 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8357 NULL_RTX, VOIDmode, 0);
8358 temp = expand_binop (mode, other_optab, op0, op1, target,
8359 unsignedp, OPTAB_LIB_WIDEN);
8360 htem = expand_mult_highpart_adjust (innermode,
8361 gen_highpart (innermode, temp),
8362 op0, op1,
8363 gen_highpart (innermode, temp),
8364 unsignedp);
8365 emit_move_insn (gen_highpart (innermode, temp), htem);
8366 return temp;
8370 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8371 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8372 TREE_OPERAND (exp, 1), 0))
8373 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8374 else
8375 op1 = op0;
8376 return expand_mult (mode, op0, op1, target, unsignedp);
8378 case TRUNC_DIV_EXPR:
8379 case FLOOR_DIV_EXPR:
8380 case CEIL_DIV_EXPR:
8381 case ROUND_DIV_EXPR:
8382 case EXACT_DIV_EXPR:
8383 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8384 subtarget = 0;
8385 if (modifier == EXPAND_STACK_PARM)
8386 target = 0;
8387 /* Possible optimization: compute the dividend with EXPAND_SUM
8388 then, if the divisor is constant, we can optimize the case
8389 where some terms of the dividend have coefficients divisible by it. */
8390 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8391 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8392 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8394 case RDIV_EXPR:
8395 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
8396 saving an expensive divide. If not, combine will rebuild the original
8397 computation. */
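      /* Illustrative example: with -funsafe-math-optimizations, x / y and
	 z / y both become multiplications by (1/y), so CSE can keep a
	 single reciprocal and the two divides become one divide and two
	 multiplies.  */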
8398 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8399 && TREE_CODE (type) == REAL_TYPE
8400 && !real_onep (TREE_OPERAND (exp, 0)))
8401 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8402 build (RDIV_EXPR, type,
8403 build_real (type, dconst1),
8404 TREE_OPERAND (exp, 1))),
8405 target, tmode, modifier);
8406 this_optab = sdiv_optab;
8407 goto binop;
8409 case TRUNC_MOD_EXPR:
8410 case FLOOR_MOD_EXPR:
8411 case CEIL_MOD_EXPR:
8412 case ROUND_MOD_EXPR:
8413 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8414 subtarget = 0;
8415 if (modifier == EXPAND_STACK_PARM)
8416 target = 0;
8417 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8418 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8419 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8421 case FIX_ROUND_EXPR:
8422 case FIX_FLOOR_EXPR:
8423 case FIX_CEIL_EXPR:
8424 abort (); /* Not used for C. */
8426 case FIX_TRUNC_EXPR:
8427 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8428 if (target == 0 || modifier == EXPAND_STACK_PARM)
8429 target = gen_reg_rtx (mode);
8430 expand_fix (target, op0, unsignedp);
8431 return target;
8433 case FLOAT_EXPR:
8434 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8435 if (target == 0 || modifier == EXPAND_STACK_PARM)
8436 target = gen_reg_rtx (mode);
8437 /* expand_float can't figure out what to do if FROM has VOIDmode.
8438 So give it the correct mode. With -O, cse will optimize this. */
8439 if (GET_MODE (op0) == VOIDmode)
8440 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8441 op0);
8442 expand_float (target, op0,
8443 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8444 return target;
8446 case NEGATE_EXPR:
8447 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8448 if (modifier == EXPAND_STACK_PARM)
8449 target = 0;
8450 temp = expand_unop (mode,
8451 ! unsignedp && flag_trapv
8452 && (GET_MODE_CLASS(mode) == MODE_INT)
8453 ? negv_optab : neg_optab, op0, target, 0);
8454 if (temp == 0)
8455 abort ();
8456 return temp;
8458 case ABS_EXPR:
8459 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8460 if (modifier == EXPAND_STACK_PARM)
8461 target = 0;
8463 /* Handle complex values specially. */
8464 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8465 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8466 return expand_complex_abs (mode, op0, target, unsignedp);
8468 /* Unsigned abs is simply the operand. Testing here means we don't
8469 risk generating incorrect code below. */
8470 if (TREE_UNSIGNED (type))
8471 return op0;
8473 return expand_abs (mode, op0, target, unsignedp,
8474 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8476 case MAX_EXPR:
8477 case MIN_EXPR:
8478 target = original_target;
8479 if (target == 0
8480 || modifier == EXPAND_STACK_PARM
8481 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8482 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8483 || GET_MODE (target) != mode
8484 || (GET_CODE (target) == REG
8485 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8486 target = gen_reg_rtx (mode);
8487 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8488 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8490 /* First try to do it with a special MIN or MAX instruction.
8491 If that does not win, use a conditional jump to select the proper
8492 value. */
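      /* Illustrative shape of the fallback for MAX_EXPR when no smax/umax
	 pattern exists:
	     target = op0;
	     if (target >= op1) goto done;
	     target = op1;
	   done:
	 with a word-by-word comparison when the mode is too wide to
	 compare directly.  */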
8493 this_optab = (TREE_UNSIGNED (type)
8494 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8495 : (code == MIN_EXPR ? smin_optab : smax_optab));
8497 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8498 OPTAB_WIDEN);
8499 if (temp != 0)
8500 return temp;
8502 /* At this point, a MEM target is no longer useful; we will get better
8503 code without it. */
8505 if (GET_CODE (target) == MEM)
8506 target = gen_reg_rtx (mode);
8508 if (target != op0)
8509 emit_move_insn (target, op0);
8511 op0 = gen_label_rtx ();
8513 /* If this mode is an integer too wide to compare properly,
8514 compare word by word. Rely on cse to optimize constant cases. */
8515 if (GET_MODE_CLASS (mode) == MODE_INT
8516 && ! can_compare_p (GE, mode, ccp_jump))
8518 if (code == MAX_EXPR)
8519 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8520 target, op1, NULL_RTX, op0);
8521 else
8522 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8523 op1, target, NULL_RTX, op0);
8525 else
8527 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8528 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8529 unsignedp, mode, NULL_RTX, NULL_RTX,
8530 op0);
8532 emit_move_insn (target, op1);
8533 emit_label (op0);
8534 return target;
8536 case BIT_NOT_EXPR:
8537 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8538 if (modifier == EXPAND_STACK_PARM)
8539 target = 0;
8540 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8541 if (temp == 0)
8542 abort ();
8543 return temp;
8545 case FFS_EXPR:
8546 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8547 if (modifier == EXPAND_STACK_PARM)
8548 target = 0;
8549 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8550 if (temp == 0)
8551 abort ();
8552 return temp;
8554 case CLZ_EXPR:
8555 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8556 temp = expand_unop (mode, clz_optab, op0, target, 1);
8557 if (temp == 0)
8558 abort ();
8559 return temp;
8561 case CTZ_EXPR:
8562 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8563 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8564 if (temp == 0)
8565 abort ();
8566 return temp;
8568 case POPCOUNT_EXPR:
8569 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8570 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8571 if (temp == 0)
8572 abort ();
8573 return temp;
8575 case PARITY_EXPR:
8576 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8577 temp = expand_unop (mode, parity_optab, op0, target, 1);
8578 if (temp == 0)
8579 abort ();
8580 return temp;
8582 /* ??? Can optimize bitwise operations with one arg constant.
8583 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8584 and (a bitwise1 b) bitwise2 b (etc)
8585 but that is probably not worthwhile. */
8587 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8588 boolean values when we want in all cases to compute both of them. In
8589 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8590 as actual zero-or-1 values and then bitwise anding. In cases where
8591 there cannot be any side effects, better code would be made by
8592 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8593 how to recognize those cases. */
8595 case TRUTH_AND_EXPR:
8596 case BIT_AND_EXPR:
8597 this_optab = and_optab;
8598 goto binop;
8600 case TRUTH_OR_EXPR:
8601 case BIT_IOR_EXPR:
8602 this_optab = ior_optab;
8603 goto binop;
8605 case TRUTH_XOR_EXPR:
8606 case BIT_XOR_EXPR:
8607 this_optab = xor_optab;
8608 goto binop;
8610 case LSHIFT_EXPR:
8611 case RSHIFT_EXPR:
8612 case LROTATE_EXPR:
8613 case RROTATE_EXPR:
8614 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8615 subtarget = 0;
8616 if (modifier == EXPAND_STACK_PARM)
8617 target = 0;
8618 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8619 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8620 unsignedp);
8622 /* Could determine the answer when only additive constants differ. Also,
8623 the addition of one can be handled by changing the condition. */
8624 case LT_EXPR:
8625 case LE_EXPR:
8626 case GT_EXPR:
8627 case GE_EXPR:
8628 case EQ_EXPR:
8629 case NE_EXPR:
8630 case UNORDERED_EXPR:
8631 case ORDERED_EXPR:
8632 case UNLT_EXPR:
8633 case UNLE_EXPR:
8634 case UNGT_EXPR:
8635 case UNGE_EXPR:
8636 case UNEQ_EXPR:
8637 temp = do_store_flag (exp,
8638 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8639 tmode != VOIDmode ? tmode : mode, 0);
8640 if (temp != 0)
8641 return temp;
8643 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
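      /* Illustrative example: for r = (x != 0), with the original target a
	 register of x's mode, the code below loads x into that register,
	 skips the following move when the value is zero, and otherwise
	 overwrites it with 1.  */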
8644 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8645 && original_target
8646 && GET_CODE (original_target) == REG
8647 && (GET_MODE (original_target)
8648 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8650 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8651 VOIDmode, 0);
8653 /* If temp is constant, we can just compute the result. */
8654 if (GET_CODE (temp) == CONST_INT)
8656 if (INTVAL (temp) != 0)
8657 emit_move_insn (target, const1_rtx);
8658 else
8659 emit_move_insn (target, const0_rtx);
8661 return target;
8664 if (temp != original_target)
8666 enum machine_mode mode1 = GET_MODE (temp);
8667 if (mode1 == VOIDmode)
8668 mode1 = tmode != VOIDmode ? tmode : mode;
8670 temp = copy_to_mode_reg (mode1, temp);
8673 op1 = gen_label_rtx ();
8674 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8675 GET_MODE (temp), unsignedp, op1);
8676 emit_move_insn (temp, const1_rtx);
8677 emit_label (op1);
8678 return temp;
8681 /* If no set-flag instruction, must generate a conditional
8682 store into a temporary variable. Drop through
8683 and handle this like && and ||. */
8685 case TRUTH_ANDIF_EXPR:
8686 case TRUTH_ORIF_EXPR:
8687 if (! ignore
8688 && (target == 0
8689 || modifier == EXPAND_STACK_PARM
8690 || ! safe_from_p (target, exp, 1)
8691 /* Make sure we don't have a hard reg (such as function's return
8692 value) live across basic blocks, if not optimizing. */
8693 || (!optimize && GET_CODE (target) == REG
8694 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8695 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8697 if (target)
8698 emit_clr_insn (target);
8700 op1 = gen_label_rtx ();
8701 jumpifnot (exp, op1);
8703 if (target)
8704 emit_0_to_1_insn (target);
8706 emit_label (op1);
8707 return ignore ? const0_rtx : target;
8709 case TRUTH_NOT_EXPR:
8710 if (modifier == EXPAND_STACK_PARM)
8711 target = 0;
8712 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8713 /* The parser is careful to generate TRUTH_NOT_EXPR
8714 only with operands that are always zero or one. */
8715 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8716 target, 1, OPTAB_LIB_WIDEN);
8717 if (temp == 0)
8718 abort ();
8719 return temp;
8721 case COMPOUND_EXPR:
8722 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8723 emit_queue ();
8724 return expand_expr (TREE_OPERAND (exp, 1),
8725 (ignore ? const0_rtx : target),
8726 VOIDmode, modifier);
8728 case COND_EXPR:
8729 /* If we would have a "singleton" (see below) were it not for a
8730 conversion in each arm, bring that conversion back out. */
8731 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8732 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8733 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8734 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8736 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8737 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8739 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8740 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8741 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8742 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8743 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8744 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8745 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8746 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8747 return expand_expr (build1 (NOP_EXPR, type,
8748 build (COND_EXPR, TREE_TYPE (iftrue),
8749 TREE_OPERAND (exp, 0),
8750 iftrue, iffalse)),
8751 target, tmode, modifier);
8755 /* Note that COND_EXPRs whose type is a structure or union
8756 are required to be constructed to contain assignments of
8757 a temporary variable, so that we can evaluate them here
8758 for side effect only. If type is void, we must do likewise. */
8760 /* If an arm of the branch requires a cleanup,
8761 only that cleanup is performed. */
8763 tree singleton = 0;
8764 tree binary_op = 0, unary_op = 0;
8766 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8767 convert it to our mode, if necessary. */
8768 if (integer_onep (TREE_OPERAND (exp, 1))
8769 && integer_zerop (TREE_OPERAND (exp, 2))
8770 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8772 if (ignore)
8774 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8775 modifier);
8776 return const0_rtx;
8779 if (modifier == EXPAND_STACK_PARM)
8780 target = 0;
8781 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8782 if (GET_MODE (op0) == mode)
8783 return op0;
8785 if (target == 0)
8786 target = gen_reg_rtx (mode);
8787 convert_move (target, op0, unsignedp);
8788 return target;
8791 /* Check for X ? A + B : A. If we have this, we can copy A to the
8792 output and conditionally add B. Similarly for unary operations.
8793 Don't do this if X has side-effects because those side effects
8794 might affect A or B and the "?" operation is a sequence point in
8795 ANSI. (operand_equal_p tests for side effects.) */
8797 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8798 && operand_equal_p (TREE_OPERAND (exp, 2),
8799 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8800 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8801 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8802 && operand_equal_p (TREE_OPERAND (exp, 1),
8803 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8804 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8805 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8806 && operand_equal_p (TREE_OPERAND (exp, 2),
8807 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8808 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8809 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8810 && operand_equal_p (TREE_OPERAND (exp, 1),
8811 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8812 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8814 /* If we are not to produce a result, we have no target. Otherwise,
8815 if a target was specified use it; it will not be used as an
8816 intermediate target unless it is safe. If no target, use a
8817 temporary. */
8819 if (ignore)
8820 temp = 0;
8821 else if (modifier == EXPAND_STACK_PARM)
8822 temp = assign_temp (type, 0, 0, 1);
8823 else if (original_target
8824 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8825 || (singleton && GET_CODE (original_target) == REG
8826 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8827 && original_target == var_rtx (singleton)))
8828 && GET_MODE (original_target) == mode
8829 #ifdef HAVE_conditional_move
8830 && (! can_conditionally_move_p (mode)
8831 || GET_CODE (original_target) == REG
8832 || TREE_ADDRESSABLE (type))
8833 #endif
8834 && (GET_CODE (original_target) != MEM
8835 || TREE_ADDRESSABLE (type)))
8836 temp = original_target;
8837 else if (TREE_ADDRESSABLE (type))
8838 abort ();
8839 else
8840 temp = assign_temp (type, 0, 0, 1);
8842 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8843 do the test of X as a store-flag operation, do this as
8844 A + ((X != 0) << log C). Similarly for other simple binary
8845 operators. Only do for C == 1 if BRANCH_COST is low. */
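      /* Illustrative example: when a store-flag instruction is available
	 (and BRANCH_COST makes it worthwhile),
	     y = (a < b) ? x + 4 : x;
	 is expanded below as
	     y = x + ((a < b) << 2);
	 with no branch.  */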
8846 if (temp && singleton && binary_op
8847 && (TREE_CODE (binary_op) == PLUS_EXPR
8848 || TREE_CODE (binary_op) == MINUS_EXPR
8849 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8850 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8851 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8852 : integer_onep (TREE_OPERAND (binary_op, 1)))
8853 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8855 rtx result;
8856 tree cond;
8857 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8858 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8859 ? addv_optab : add_optab)
8860 : TREE_CODE (binary_op) == MINUS_EXPR
8861 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8862 ? subv_optab : sub_optab)
8863 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8864 : xor_optab);
8866 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8867 if (singleton == TREE_OPERAND (exp, 1))
8868 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8869 else
8870 cond = TREE_OPERAND (exp, 0);
8872 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8873 ? temp : NULL_RTX),
8874 mode, BRANCH_COST <= 1);
8876 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8877 result = expand_shift (LSHIFT_EXPR, mode, result,
8878 build_int_2 (tree_log2
8879 (TREE_OPERAND
8880 (binary_op, 1)),
8881 0),
8882 (safe_from_p (temp, singleton, 1)
8883 ? temp : NULL_RTX), 0);
8885 if (result)
8887 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8888 return expand_binop (mode, boptab, op1, result, temp,
8889 unsignedp, OPTAB_LIB_WIDEN);
8893 do_pending_stack_adjust ();
8894 NO_DEFER_POP;
8895 op0 = gen_label_rtx ();
8897 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8899 if (temp != 0)
8901 /* If the target conflicts with the other operand of the
8902 binary op, we can't use it. Also, we can't use the target
8903 if it is a hard register, because evaluating the condition
8904 might clobber it. */
8905 if ((binary_op
8906 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8907 || (GET_CODE (temp) == REG
8908 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8909 temp = gen_reg_rtx (mode);
8910 store_expr (singleton, temp,
8911 modifier == EXPAND_STACK_PARM ? 2 : 0);
8913 else
8914 expand_expr (singleton,
8915 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8916 if (singleton == TREE_OPERAND (exp, 1))
8917 jumpif (TREE_OPERAND (exp, 0), op0);
8918 else
8919 jumpifnot (TREE_OPERAND (exp, 0), op0);
8921 start_cleanup_deferral ();
8922 if (binary_op && temp == 0)
8923 /* Just touch the other operand. */
8924 expand_expr (TREE_OPERAND (binary_op, 1),
8925 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8926 else if (binary_op)
8927 store_expr (build (TREE_CODE (binary_op), type,
8928 make_tree (type, temp),
8929 TREE_OPERAND (binary_op, 1)),
8930 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8931 else
8932 store_expr (build1 (TREE_CODE (unary_op), type,
8933 make_tree (type, temp)),
8934 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8935 op1 = op0;
8937 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8938 comparison operator. If we have one of these cases, set the
8939 output to A, branch on A (cse will merge these two references),
8940 then set the output to FOO. */
8941 else if (temp
8942 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8943 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8944 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8945 TREE_OPERAND (exp, 1), 0)
8946 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8947 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8948 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8950 if (GET_CODE (temp) == REG
8951 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8952 temp = gen_reg_rtx (mode);
8953 store_expr (TREE_OPERAND (exp, 1), temp,
8954 modifier == EXPAND_STACK_PARM ? 2 : 0);
8955 jumpif (TREE_OPERAND (exp, 0), op0);
8957 start_cleanup_deferral ();
8958 store_expr (TREE_OPERAND (exp, 2), temp,
8959 modifier == EXPAND_STACK_PARM ? 2 : 0);
8960 op1 = op0;
8962 else if (temp
8963 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8964 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8965 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8966 TREE_OPERAND (exp, 2), 0)
8967 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8968 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8969 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8971 if (GET_CODE (temp) == REG
8972 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8973 temp = gen_reg_rtx (mode);
8974 store_expr (TREE_OPERAND (exp, 2), temp,
8975 modifier == EXPAND_STACK_PARM ? 2 : 0);
8976 jumpifnot (TREE_OPERAND (exp, 0), op0);
8978 start_cleanup_deferral ();
8979 store_expr (TREE_OPERAND (exp, 1), temp,
8980 modifier == EXPAND_STACK_PARM ? 2 : 0);
8981 op1 = op0;
8983 else
8985 op1 = gen_label_rtx ();
8986 jumpifnot (TREE_OPERAND (exp, 0), op0);
8988 start_cleanup_deferral ();
8990 /* One branch of the cond can be void, if it never returns. For
8991 example, A ? throw : E. */
8992 if (temp != 0
8993 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8994 store_expr (TREE_OPERAND (exp, 1), temp,
8995 modifier == EXPAND_STACK_PARM ? 2 : 0);
8996 else
8997 expand_expr (TREE_OPERAND (exp, 1),
8998 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8999 end_cleanup_deferral ();
9000 emit_queue ();
9001 emit_jump_insn (gen_jump (op1));
9002 emit_barrier ();
9003 emit_label (op0);
9004 start_cleanup_deferral ();
9005 if (temp != 0
9006 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
9007 store_expr (TREE_OPERAND (exp, 2), temp,
9008 modifier == EXPAND_STACK_PARM ? 2 : 0);
9009 else
9010 expand_expr (TREE_OPERAND (exp, 2),
9011 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9014 end_cleanup_deferral ();
9016 emit_queue ();
9017 emit_label (op1);
9018 OK_DEFER_POP;
9020 return temp;
9023 case TARGET_EXPR:
9025 /* Something needs to be initialized, but we didn't know
9026 where that thing was when building the tree. For example,
9027 it could be the return value of a function, or a parameter
9028 to a function which is laid down on the stack, or a temporary
9029 variable which must be passed by reference.
9031 We guarantee that the expression will either be constructed
9032 or copied into our original target. */
9034 tree slot = TREE_OPERAND (exp, 0);
9035 tree cleanups = NULL_TREE;
9036 tree exp1;
9038 if (TREE_CODE (slot) != VAR_DECL)
9039 abort ();
9041 if (! ignore)
9042 target = original_target;
9044 /* Set this here so that if we get a target that refers to a
9045 register variable that's already been used, put_reg_into_stack
9046 knows that it should fix up those uses. */
9047 TREE_USED (slot) = 1;
9049 if (target == 0)
9051 if (DECL_RTL_SET_P (slot))
9053 target = DECL_RTL (slot);
9054 /* If we have already expanded the slot, don't do
9055 it again. (mrs) */
9056 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9057 return target;
9059 else
9061 target = assign_temp (type, 2, 0, 1);
9062 /* All temp slots at this level must not conflict. */
9063 preserve_temp_slots (target);
9064 SET_DECL_RTL (slot, target);
9065 if (TREE_ADDRESSABLE (slot))
9066 put_var_into_stack (slot, /*rescan=*/false);
9068 /* Since SLOT is not known to the called function
9069 to belong to its stack frame, we must build an explicit
9070 cleanup. This case occurs when we must build up a reference
9071 to pass the reference as an argument. In this case,
9072 it is very likely that such a reference need not be
9073 built here. */
9075 if (TREE_OPERAND (exp, 2) == 0)
9076 TREE_OPERAND (exp, 2)
9077 = (*lang_hooks.maybe_build_cleanup) (slot);
9078 cleanups = TREE_OPERAND (exp, 2);
9081 else
9083 /* This case does occur, when expanding a parameter which
9084 needs to be constructed on the stack. The target
9085 is the actual stack address that we want to initialize.
9086 The function we call will perform the cleanup in this case. */
9088 /* If we have already assigned it space, use that space,
9089 not the target that we were passed in, as our target
9090 parameter is only a hint. */
9091 if (DECL_RTL_SET_P (slot))
9093 target = DECL_RTL (slot);
9094 /* We have already expanded the slot, so don't do
9095 it again. (mrs) */
9096 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9097 return target;
9099 else
9101 SET_DECL_RTL (slot, target);
9102 /* If we must have an addressable slot, then make sure that
9103 the RTL that we just stored in slot is OK. */
9104 if (TREE_ADDRESSABLE (slot))
9105 put_var_into_stack (slot, /*rescan=*/true);
9109 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9110 /* Mark it as expanded. */
9111 TREE_OPERAND (exp, 1) = NULL_TREE;
9113 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9115 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9117 return target;
9120 case INIT_EXPR:
9122 tree lhs = TREE_OPERAND (exp, 0);
9123 tree rhs = TREE_OPERAND (exp, 1);
9125 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9126 return temp;
9129 case MODIFY_EXPR:
9131 /* If lhs is complex, expand calls in rhs before computing it.
9132 That's so we don't compute a pointer and save it over a
9133 call. If lhs is simple, compute it first so we can give it
9134 as a target if the rhs is just a call. This avoids an
9135 extra temp and copy and that prevents a partial-subsumption
9136 which makes bad code. Actually we could treat
9137 component_ref's of vars like vars. */
9139 tree lhs = TREE_OPERAND (exp, 0);
9140 tree rhs = TREE_OPERAND (exp, 1);
9142 temp = 0;
9144 /* Check for |= or &= of a bitfield of size one into another bitfield
9145 of size one. In this case, (unless we need the result of the
9146 assignment) we can do this more efficiently with a
9147 test followed by an assignment, if necessary.
9149 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9150 things change so we do, this code should be enhanced to
9151 support it. */
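/* For one-bit fields this turns, e.g., "a.b |= c.d" into "if (c.d) a.b = 1;" and "a.b &= c.d" into "if (!c.d) a.b = 0;". */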
9152 if (ignore
9153 && TREE_CODE (lhs) == COMPONENT_REF
9154 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9155 || TREE_CODE (rhs) == BIT_AND_EXPR)
9156 && TREE_OPERAND (rhs, 0) == lhs
9157 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9158 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9159 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9161 rtx label = gen_label_rtx ();
9163 do_jump (TREE_OPERAND (rhs, 1),
9164 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9165 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9166 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9167 (TREE_CODE (rhs) == BIT_IOR_EXPR
9168 ? integer_one_node
9169 : integer_zero_node)),
9170 0, 0);
9171 do_pending_stack_adjust ();
9172 emit_label (label);
9173 return const0_rtx;
9176 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9178 return temp;
9181 case RETURN_EXPR:
9182 if (!TREE_OPERAND (exp, 0))
9183 expand_null_return ();
9184 else
9185 expand_return (TREE_OPERAND (exp, 0));
9186 return const0_rtx;
9188 case PREINCREMENT_EXPR:
9189 case PREDECREMENT_EXPR:
9190 return expand_increment (exp, 0, ignore);
9192 case POSTINCREMENT_EXPR:
9193 case POSTDECREMENT_EXPR:
9194 /* Faster to treat as pre-increment if result is not used. */
9195 return expand_increment (exp, ! ignore, ignore);
9197 case ADDR_EXPR:
9198 if (modifier == EXPAND_STACK_PARM)
9199 target = 0;
9200 /* Are we taking the address of a nested function? */
9201 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9202 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9203 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9204 && ! TREE_STATIC (exp))
9206 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9207 op0 = force_operand (op0, target);
9209 /* If we are taking the address of something erroneous, just
9210 return a zero. */
9211 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9212 return const0_rtx;
9213 /* If we are taking the address of a constant and are at the
9214 top level, we have to use output_constant_def since we can't
9215 call force_const_mem at top level. */
9216 else if (cfun == 0
9217 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9218 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9219 == 'c')))
9220 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9221 else
9223 /* We make sure to pass const0_rtx down if we came in with
9224 ignore set, to avoid doing the cleanups twice. */
9225 op0 = expand_expr (TREE_OPERAND (exp, 0),
9226 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9227 (modifier == EXPAND_INITIALIZER
9228 ? modifier : EXPAND_CONST_ADDRESS));
9230 /* If we are going to ignore the result, OP0 will have been set
9231 to const0_rtx, so just return it. Don't get confused and
9232 think we are taking the address of the constant. */
9233 if (ignore)
9234 return op0;
9236 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9237 clever and returns a REG when given a MEM. */
9238 op0 = protect_from_queue (op0, 1);
9240 /* We would like the object in memory. If it is a constant, we can
9241 have it be statically allocated into memory. For a non-constant,
9242 we need to allocate some memory and store the value into it. */
9244 if (CONSTANT_P (op0))
9245 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9246 op0);
9247 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9248 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9249 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9251 /* If the operand is a SAVE_EXPR, we can deal with this by
9252 forcing the SAVE_EXPR into memory. */
9253 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9255 put_var_into_stack (TREE_OPERAND (exp, 0),
9256 /*rescan=*/true);
9257 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9259 else
9261 /* If this object is in a register, it can't be BLKmode. */
9262 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9263 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9265 if (GET_CODE (op0) == PARALLEL)
9266 /* Handle calls that pass values in multiple
9267 non-contiguous locations. The Irix 6 ABI has examples
9268 of this. */
9269 emit_group_store (memloc, op0,
9270 int_size_in_bytes (inner_type));
9271 else
9272 emit_move_insn (memloc, op0);
9274 op0 = memloc;
9278 if (GET_CODE (op0) != MEM)
9279 abort ();
9281 mark_temp_addr_taken (op0);
9282 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9284 op0 = XEXP (op0, 0);
9285 #ifdef POINTERS_EXTEND_UNSIGNED
9286 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9287 && mode == ptr_mode)
9288 op0 = convert_memory_address (ptr_mode, op0);
9289 #endif
9290 return op0;
9293 /* If OP0 is not aligned at least as much as the type requires, we
9294 need to make a temporary, copy OP0 to it, and take the address of
9295 the temporary. We want to use the alignment of the type, not of
9296 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9297 the test for BLKmode means that can't happen. The test for
9298 BLKmode is because we never make mis-aligned MEMs with
9299 non-BLKmode.
9301 We don't need to do this at all if the machine doesn't have
9302 strict alignment. */
9303 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9304 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9305 > MEM_ALIGN (op0))
9306 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9308 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9309 rtx new;
9311 if (TYPE_ALIGN_OK (inner_type))
9312 abort ();
9314 if (TREE_ADDRESSABLE (inner_type))
9316 /* We can't make a bitwise copy of this object, so fail. */
9317 error ("cannot take the address of an unaligned member");
9318 return const0_rtx;
9321 new = assign_stack_temp_for_type
9322 (TYPE_MODE (inner_type),
9323 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9324 : int_size_in_bytes (inner_type),
9325 1, build_qualified_type (inner_type,
9326 (TYPE_QUALS (inner_type)
9327 | TYPE_QUAL_CONST)));
9329 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9330 (modifier == EXPAND_STACK_PARM
9331 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9333 op0 = new;
9336 op0 = force_operand (XEXP (op0, 0), target);
9339 if (flag_force_addr
9340 && GET_CODE (op0) != REG
9341 && modifier != EXPAND_CONST_ADDRESS
9342 && modifier != EXPAND_INITIALIZER
9343 && modifier != EXPAND_SUM)
9344 op0 = force_reg (Pmode, op0);
9346 if (GET_CODE (op0) == REG
9347 && ! REG_USERVAR_P (op0))
9348 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9350 #ifdef POINTERS_EXTEND_UNSIGNED
9351 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9352 && mode == ptr_mode)
9353 op0 = convert_memory_address (ptr_mode, op0);
9354 #endif
9356 return op0;
9358 case ENTRY_VALUE_EXPR:
9359 abort ();
9361 /* COMPLEX type for Extended Pascal & Fortran */
9362 case COMPLEX_EXPR:
9364 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9365 rtx insns;
9367 /* Get the rtx code of the operands. */
9368 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9369 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9371 if (! target)
9372 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9374 start_sequence ();
9376 /* Move the real (op0) and imaginary (op1) parts to their location. */
9377 emit_move_insn (gen_realpart (mode, target), op0);
9378 emit_move_insn (gen_imagpart (mode, target), op1);
9380 insns = get_insns ();
9381 end_sequence ();
9383 /* Complex construction should appear as a single unit. */
9384 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9385 each with a separate pseudo as destination.
9386 It's not correct for flow to treat them as a unit. */
9387 if (GET_CODE (target) != CONCAT)
9388 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9389 else
9390 emit_insn (insns);
9392 return target;
9395 case REALPART_EXPR:
9396 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9397 return gen_realpart (mode, op0);
9399 case IMAGPART_EXPR:
9400 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9401 return gen_imagpart (mode, op0);
9403 case CONJ_EXPR:
9405 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9406 rtx imag_t;
9407 rtx insns;
9409 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9411 if (! target)
9412 target = gen_reg_rtx (mode);
9414 start_sequence ();
9416 /* Store the realpart and the negated imagpart to target. */
9417 emit_move_insn (gen_realpart (partmode, target),
9418 gen_realpart (partmode, op0));
9420 imag_t = gen_imagpart (partmode, target);
9421 temp = expand_unop (partmode,
9422 ! unsignedp && flag_trapv
9423 && (GET_MODE_CLASS(partmode) == MODE_INT)
9424 ? negv_optab : neg_optab,
9425 gen_imagpart (partmode, op0), imag_t, 0);
9426 if (temp != imag_t)
9427 emit_move_insn (imag_t, temp);
9429 insns = get_insns ();
9430 end_sequence ();
9432 /* Conjugate should appear as a single unit.
9433 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9434 each with a separate pseudo as destination.
9435 It's not correct for flow to treat them as a unit. */
9436 if (GET_CODE (target) != CONCAT)
9437 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9438 else
9439 emit_insn (insns);
9441 return target;
9444 case TRY_CATCH_EXPR:
9446 tree handler = TREE_OPERAND (exp, 1);
9448 expand_eh_region_start ();
9450 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9452 expand_eh_region_end_cleanup (handler);
9454 return op0;
9457 case TRY_FINALLY_EXPR:
9459 tree try_block = TREE_OPERAND (exp, 0);
9460 tree finally_block = TREE_OPERAND (exp, 1);
9462 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9464 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9465 is not sufficient to let us safely expand the block twice,
9466 so we play games with GOTO_SUBROUTINE_EXPR to let us
9467 expand the thing only once. */
9468 /* When not optimizing, we go ahead with this form since
9469 (1) user breakpoints operate more predictably without
9470 code duplication, and
9471 (2) we're not running any of the global optimizers
9472 that would explode in time/space with the highly
9473 connected CFG created by the indirect branching. */
9475 rtx finally_label = gen_label_rtx ();
9476 rtx done_label = gen_label_rtx ();
9477 rtx return_link = gen_reg_rtx (Pmode);
9478 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9479 (tree) finally_label, (tree) return_link);
9480 TREE_SIDE_EFFECTS (cleanup) = 1;
9482 /* Start a new binding layer that will keep track of all cleanup
9483 actions to be performed. */
9484 expand_start_bindings (2);
9485 target_temp_slot_level = temp_slot_level;
9487 expand_decl_cleanup (NULL_TREE, cleanup);
9488 op0 = expand_expr (try_block, target, tmode, modifier);
9490 preserve_temp_slots (op0);
9491 expand_end_bindings (NULL_TREE, 0, 0);
9492 emit_jump (done_label);
9493 emit_label (finally_label);
9494 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9495 emit_indirect_jump (return_link);
9496 emit_label (done_label);
9498 else
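/* Here FINALLY_BLOCK is cheap to re-evaluate, so simply register it as an ordinary cleanup, accepting that it may be expanded more than once, on each path that leaves the bindings. */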
9500 expand_start_bindings (2);
9501 target_temp_slot_level = temp_slot_level;
9503 expand_decl_cleanup (NULL_TREE, finally_block);
9504 op0 = expand_expr (try_block, target, tmode, modifier);
9506 preserve_temp_slots (op0);
9507 expand_end_bindings (NULL_TREE, 0, 0);
9510 return op0;
9513 case GOTO_SUBROUTINE_EXPR:
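/* Jump to the cleanup "subroutine" built by TRY_FINALLY_EXPR above: record the return address in RETURN_LINK, jump to the subroutine's label, and emit the label to come back to. The subroutine returns through the indirect jump on RETURN_LINK. */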
9515 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9516 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9517 rtx return_address = gen_label_rtx ();
9518 emit_move_insn (return_link,
9519 gen_rtx_LABEL_REF (Pmode, return_address));
9520 emit_jump (subr);
9521 emit_label (return_address);
9522 return const0_rtx;
9525 case VA_ARG_EXPR:
9526 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9528 case EXC_PTR_EXPR:
9529 return get_exception_pointer (cfun);
9531 case FDESC_EXPR:
9532 /* Function descriptors are not valid except for as
9533 initialization constants, and should not be expanded. */
9534 abort ();
9536 default:
9537 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9540 /* Here to do an ordinary binary operator, generating an instruction
9541 from the optab already placed in `this_optab'. */
9542 binop:
9543 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9544 subtarget = 0;
9545 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9546 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9547 binop2:
9548 if (modifier == EXPAND_STACK_PARM)
9549 target = 0;
9550 temp = expand_binop (mode, this_optab, op0, op1, target,
9551 unsignedp, OPTAB_LIB_WIDEN);
9552 if (temp == 0)
9553 abort ();
9554 return temp;
9557 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9558 when applied to the address of EXP produces an address known to be
9559 aligned more than BIGGEST_ALIGNMENT. */
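/* In practice such an offset has the form (- (T) &EXP) & (ALIGN - 1), where the mask ALIGN - 1 is larger than BIGGEST_ALIGNMENT; adding it to the address of EXP rounds that address up to an ALIGN-byte boundary. */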
9561 static int
9562 is_aligning_offset (tree offset, tree exp)
9564 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9565 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9566 || TREE_CODE (offset) == NOP_EXPR
9567 || TREE_CODE (offset) == CONVERT_EXPR
9568 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9569 offset = TREE_OPERAND (offset, 0);
9571 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9572 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9573 if (TREE_CODE (offset) != BIT_AND_EXPR
9574 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9575 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9576 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9577 return 0;
9579 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9580 It must be NEGATE_EXPR. Then strip any more conversions. */
9581 offset = TREE_OPERAND (offset, 0);
9582 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9583 || TREE_CODE (offset) == NOP_EXPR
9584 || TREE_CODE (offset) == CONVERT_EXPR)
9585 offset = TREE_OPERAND (offset, 0);
9587 if (TREE_CODE (offset) != NEGATE_EXPR)
9588 return 0;
9590 offset = TREE_OPERAND (offset, 0);
9591 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9592 || TREE_CODE (offset) == NOP_EXPR
9593 || TREE_CODE (offset) == CONVERT_EXPR)
9594 offset = TREE_OPERAND (offset, 0);
9596 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9597 whose type is the same as EXP. */
9598 return (TREE_CODE (offset) == ADDR_EXPR
9599 && (TREE_OPERAND (offset, 0) == exp
9600 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9601 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9602 == TREE_TYPE (exp)))));
9605 /* Return the tree node if ARG corresponds to a string constant or zero
9606 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9607 in bytes within the string that ARG is accessing. The type of the
9608 offset will be `sizetype'. */
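/* For example, given ARG of the form &"foo" + 2, we return the STRING_CST "foo" and set *PTR_OFFSET to 2. */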
9610 tree
9611 string_constant (tree arg, tree *ptr_offset)
9613 STRIP_NOPS (arg);
9615 if (TREE_CODE (arg) == ADDR_EXPR
9616 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9618 *ptr_offset = size_zero_node;
9619 return TREE_OPERAND (arg, 0);
9621 else if (TREE_CODE (arg) == PLUS_EXPR)
9623 tree arg0 = TREE_OPERAND (arg, 0);
9624 tree arg1 = TREE_OPERAND (arg, 1);
9626 STRIP_NOPS (arg0);
9627 STRIP_NOPS (arg1);
9629 if (TREE_CODE (arg0) == ADDR_EXPR
9630 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9632 *ptr_offset = convert (sizetype, arg1);
9633 return TREE_OPERAND (arg0, 0);
9635 else if (TREE_CODE (arg1) == ADDR_EXPR
9636 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9638 *ptr_offset = convert (sizetype, arg0);
9639 return TREE_OPERAND (arg1, 0);
9643 return 0;
9646 /* Expand code for a post- or pre- increment or decrement
9647 and return the RTX for the result.
9648 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9650 static rtx
9651 expand_increment (tree exp, int post, int ignore)
9653 rtx op0, op1;
9654 rtx temp, value;
9655 tree incremented = TREE_OPERAND (exp, 0);
9656 optab this_optab = add_optab;
9657 int icode;
9658 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9659 int op0_is_copy = 0;
9660 int single_insn = 0;
9661 /* 1 means we can't store into OP0 directly,
9662 because it is a subreg narrower than a word,
9663 and we don't dare clobber the rest of the word. */
9664 int bad_subreg = 0;
9666 /* Stabilize any component ref that might need to be
9667 evaluated more than once below. */
9668 if (!post
9669 || TREE_CODE (incremented) == BIT_FIELD_REF
9670 || (TREE_CODE (incremented) == COMPONENT_REF
9671 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9672 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9673 incremented = stabilize_reference (incremented);
9674 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9675 ones into save exprs so that they don't accidentally get evaluated
9676 more than once by the code below. */
9677 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9678 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9679 incremented = save_expr (incremented);
9681 /* Compute the operands as RTX.
9682 Note whether OP0 is the actual lvalue or a copy of it:
9683 I believe it is a copy iff it is a register or subreg
9684 and insns were generated in computing it. */
9686 temp = get_last_insn ();
9687 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9689 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9690 in place but instead must do sign- or zero-extension during assignment,
9691 so we copy it into a new register and let the code below use it as
9692 a copy.
9694 Note that we can safely modify this SUBREG since it is known not to be
9695 shared (it was made by the expand_expr call above). */
9697 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9699 if (post)
9700 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9701 else
9702 bad_subreg = 1;
9704 else if (GET_CODE (op0) == SUBREG
9705 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9707 /* We cannot increment this SUBREG in place. If we are
9708 post-incrementing, get a copy of the old value. Otherwise,
9709 just mark that we cannot increment in place. */
9710 if (post)
9711 op0 = copy_to_reg (op0);
9712 else
9713 bad_subreg = 1;
9716 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9717 && temp != get_last_insn ());
9718 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9720 /* Decide whether incrementing or decrementing. */
9721 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9722 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9723 this_optab = sub_optab;
9725 /* Convert decrement by a constant into a negative increment. */
9726 if (this_optab == sub_optab
9727 && GET_CODE (op1) == CONST_INT)
9729 op1 = GEN_INT (-INTVAL (op1));
9730 this_optab = add_optab;
9733 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9734 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9736 /* For a preincrement, see if we can do this with a single instruction. */
9737 if (!post)
9739 icode = (int) this_optab->handlers[(int) mode].insn_code;
9740 if (icode != (int) CODE_FOR_nothing
9741 /* Make sure that OP0 is valid for operands 0 and 1
9742 of the insn we want to queue. */
9743 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9744 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9745 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9746 single_insn = 1;
9749 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9750 then we cannot just increment OP0. We must therefore contrive to
9751 increment the original value. Then, for postincrement, we can return
9752 OP0 since it is a copy of the old value. For preincrement, expand here
9753 unless we can do it with a single insn.
9755 Likewise if storing directly into OP0 would clobber high bits
9756 we need to preserve (bad_subreg). */
9757 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9759 /* This is the easiest way to increment the value wherever it is.
9760 Problems with multiple evaluation of INCREMENTED are prevented
9761 because either (1) it is a component_ref or preincrement,
9762 in which case it was stabilized above, or (2) it is an array_ref
9763 with constant index in an array in a register, which is
9764 safe to reevaluate. */
9765 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9766 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9767 ? MINUS_EXPR : PLUS_EXPR),
9768 TREE_TYPE (exp),
9769 incremented,
9770 TREE_OPERAND (exp, 1));
9772 while (TREE_CODE (incremented) == NOP_EXPR
9773 || TREE_CODE (incremented) == CONVERT_EXPR)
9775 newexp = convert (TREE_TYPE (incremented), newexp);
9776 incremented = TREE_OPERAND (incremented, 0);
9779 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9780 return post ? op0 : temp;
9783 if (post)
9785 /* We have a true reference to the value in OP0.
9786 If there is an insn to add or subtract in this mode, queue it.
9787 Queueing the increment insn avoids the register shuffling
9788 that often results if we must increment now and first save
9789 the old value for subsequent use. */
9791 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9792 op0 = stabilize (op0);
9793 #endif
9795 icode = (int) this_optab->handlers[(int) mode].insn_code;
9796 if (icode != (int) CODE_FOR_nothing
9797 /* Make sure that OP0 is valid for operands 0 and 1
9798 of the insn we want to queue. */
9799 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9800 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9802 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9803 op1 = force_reg (mode, op1);
9805 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9807 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9809 rtx addr = (general_operand (XEXP (op0, 0), mode)
9810 ? force_reg (Pmode, XEXP (op0, 0))
9811 : copy_to_reg (XEXP (op0, 0)));
9812 rtx temp, result;
9814 op0 = replace_equiv_address (op0, addr);
9815 temp = force_reg (GET_MODE (op0), op0);
9816 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9817 op1 = force_reg (mode, op1);
9819 /* The increment queue is LIFO, thus we have to `queue'
9820 the instructions in reverse order. */
9821 enqueue_insn (op0, gen_move_insn (op0, temp));
9822 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9823 return result;
9827 /* Preincrement, or we can't increment with one simple insn. */
9828 if (post)
9829 /* Save a copy of the value before inc or dec, to return it later. */
9830 temp = value = copy_to_reg (op0);
9831 else
9832 /* Arrange to return the incremented value. */
9833 /* Copy the rtx because expand_binop will protect from the queue,
9834 and the results of that would be invalid for us to return
9835 if our caller does emit_queue before using our result. */
9836 temp = copy_rtx (value = op0);
9838 /* Increment however we can. */
9839 op1 = expand_binop (mode, this_optab, value, op1, op0,
9840 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9842 /* Make sure the value is stored into OP0. */
9843 if (op1 != op0)
9844 emit_move_insn (op0, op1);
9846 return temp;
9849 /* Generate code to calculate EXP using a store-flag instruction
9850 and return an rtx for the result. EXP is either a comparison
9851 or a TRUTH_NOT_EXPR whose operand is a comparison.
9853 If TARGET is nonzero, store the result there if convenient.
9855 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9856 cheap.
9858 Return zero if there is no suitable set-flag instruction
9859 available on this machine.
9861 Once expand_expr has been called on the arguments of the comparison,
9862 we are committed to doing the store flag, since it is not safe to
9863 re-evaluate the expression. We emit the store-flag insn by calling
9864 emit_store_flag, but only expand the arguments if we have a reason
9865 to believe that emit_store_flag will be successful. If we think that
9866 it will, but it isn't, we have to simulate the store-flag with a
9867 set/jump/set sequence. */
9869 static rtx
9870 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9872 enum rtx_code code;
9873 tree arg0, arg1, type;
9874 tree tem;
9875 enum machine_mode operand_mode;
9876 int invert = 0;
9877 int unsignedp;
9878 rtx op0, op1;
9879 enum insn_code icode;
9880 rtx subtarget = target;
9881 rtx result, label;
9883 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9884 result at the end. We can't simply invert the test since it would
9885 have already been inverted if it were valid. This case occurs for
9886 some floating-point comparisons. */
9888 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9889 invert = 1, exp = TREE_OPERAND (exp, 0);
9891 arg0 = TREE_OPERAND (exp, 0);
9892 arg1 = TREE_OPERAND (exp, 1);
9894 /* Don't crash if the comparison was erroneous. */
9895 if (arg0 == error_mark_node || arg1 == error_mark_node)
9896 return const0_rtx;
9898 type = TREE_TYPE (arg0);
9899 operand_mode = TYPE_MODE (type);
9900 unsignedp = TREE_UNSIGNED (type);
9902 /* We won't bother with BLKmode store-flag operations because it would mean
9903 passing a lot of information to emit_store_flag. */
9904 if (operand_mode == BLKmode)
9905 return 0;
9907 /* We won't bother with store-flag operations involving function pointers
9908 when function pointers must be canonicalized before comparisons. */
9909 #ifdef HAVE_canonicalize_funcptr_for_compare
9910 if (HAVE_canonicalize_funcptr_for_compare
9911 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9912 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9913 == FUNCTION_TYPE))
9914 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9915 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9916 == FUNCTION_TYPE))))
9917 return 0;
9918 #endif
9920 STRIP_NOPS (arg0);
9921 STRIP_NOPS (arg1);
9923 /* Get the rtx comparison code to use. We know that EXP is a comparison
9924 operation of some type. Some comparisons against 1 and -1 can be
9925 converted to comparisons with zero. Do so here so that the tests
9926 below will be aware that we have a comparison with zero. These
9927 tests will not catch constants in the first operand, but constants
9928 are rarely passed as the first operand. */
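/* For example, X < 1 becomes X <= 0, and for signed operands X > -1 becomes X >= 0. */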
9930 switch (TREE_CODE (exp))
9932 case EQ_EXPR:
9933 code = EQ;
9934 break;
9935 case NE_EXPR:
9936 code = NE;
9937 break;
9938 case LT_EXPR:
9939 if (integer_onep (arg1))
9940 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9941 else
9942 code = unsignedp ? LTU : LT;
9943 break;
9944 case LE_EXPR:
9945 if (! unsignedp && integer_all_onesp (arg1))
9946 arg1 = integer_zero_node, code = LT;
9947 else
9948 code = unsignedp ? LEU : LE;
9949 break;
9950 case GT_EXPR:
9951 if (! unsignedp && integer_all_onesp (arg1))
9952 arg1 = integer_zero_node, code = GE;
9953 else
9954 code = unsignedp ? GTU : GT;
9955 break;
9956 case GE_EXPR:
9957 if (integer_onep (arg1))
9958 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9959 else
9960 code = unsignedp ? GEU : GE;
9961 break;
9963 case UNORDERED_EXPR:
9964 code = UNORDERED;
9965 break;
9966 case ORDERED_EXPR:
9967 code = ORDERED;
9968 break;
9969 case UNLT_EXPR:
9970 code = UNLT;
9971 break;
9972 case UNLE_EXPR:
9973 code = UNLE;
9974 break;
9975 case UNGT_EXPR:
9976 code = UNGT;
9977 break;
9978 case UNGE_EXPR:
9979 code = UNGE;
9980 break;
9981 case UNEQ_EXPR:
9982 code = UNEQ;
9983 break;
9985 default:
9986 abort ();
9989 /* Put a constant second. */
9990 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9992 tem = arg0; arg0 = arg1; arg1 = tem;
9993 code = swap_condition (code);
9996 /* If this is an equality or inequality test of a single bit, we can
9997 do this by shifting the bit being tested to the low-order bit and
9998 masking the result with the constant 1. If the condition was EQ,
9999 we xor it with 1. This does not require an scc insn and is faster
10000 than an scc insn even if we have it.
10002 The code to make this transformation was moved into fold_single_bit_test,
10003 so we just call into the folder and expand its result. */
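/* For example, (X & 8) != 0 becomes (X >> 3) & 1, and (X & 8) == 0 becomes ((X >> 3) & 1) ^ 1. */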
10005 if ((code == NE || code == EQ)
10006 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10007 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10009 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
10010 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
10011 arg0, arg1, type),
10012 target, VOIDmode, EXPAND_NORMAL);
10015 /* Now see if we are likely to be able to do this. Return if not. */
10016 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10017 return 0;
10019 icode = setcc_gen_code[(int) code];
10020 if (icode == CODE_FOR_nothing
10021 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10023 /* We can only do this if it is one of the special cases that
10024 can be handled without an scc insn. */
10025 if ((code == LT && integer_zerop (arg1))
10026 || (! only_cheap && code == GE && integer_zerop (arg1)))
10028 else if (BRANCH_COST >= 0
10029 && ! only_cheap && (code == NE || code == EQ)
10030 && TREE_CODE (type) != REAL_TYPE
10031 && ((abs_optab->handlers[(int) operand_mode].insn_code
10032 != CODE_FOR_nothing)
10033 || (ffs_optab->handlers[(int) operand_mode].insn_code
10034 != CODE_FOR_nothing)))
10036 else
10037 return 0;
10040 if (! get_subtarget (target)
10041 || GET_MODE (subtarget) != operand_mode
10042 || ! safe_from_p (subtarget, arg1, 1))
10043 subtarget = 0;
10045 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10046 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10048 if (target == 0)
10049 target = gen_reg_rtx (mode);
10051 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10052 because, if emit_store_flag does anything, it will succeed and
10053 OP0 and OP1 will not be used subsequently. */
10055 result = emit_store_flag (target, code,
10056 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10057 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10058 operand_mode, unsignedp, 1);
10060 if (result)
10062 if (invert)
10063 result = expand_binop (mode, xor_optab, result, const1_rtx,
10064 result, 0, OPTAB_LIB_WIDEN);
10065 return result;
10068 /* If this failed, we have to do this with set/compare/jump/set code. */
10069 if (GET_CODE (target) != REG
10070 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10071 target = gen_reg_rtx (GET_MODE (target));
10073 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10074 result = compare_from_rtx (op0, op1, code, unsignedp,
10075 operand_mode, NULL_RTX);
10076 if (GET_CODE (result) == CONST_INT)
10077 return (((result == const0_rtx && ! invert)
10078 || (result != const0_rtx && invert))
10079 ? const0_rtx : const1_rtx);
10081 /* The code of RESULT may not match CODE if compare_from_rtx
10082 decided to swap its operands and reverse the original code.
10084 We know that compare_from_rtx returns either a CONST_INT or
10085 a new comparison code, so it is safe to just extract the
10086 code from RESULT. */
10087 code = GET_CODE (result);
10089 label = gen_label_rtx ();
10090 if (bcc_gen_fctn[(int) code] == 0)
10091 abort ();
10093 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10094 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10095 emit_label (label);
10097 return target;
10101 /* Stubs in case we haven't got a casesi insn. */
10102 #ifndef HAVE_casesi
10103 # define HAVE_casesi 0
10104 # define gen_casesi(a, b, c, d, e) (0)
10105 # define CODE_FOR_casesi CODE_FOR_nothing
10106 #endif
10108 /* If the machine does not have a case insn that compares the bounds,
10109 this means extra overhead for dispatch tables, which raises the
10110 threshold for using them. */
10111 #ifndef CASE_VALUES_THRESHOLD
10112 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10113 #endif /* CASE_VALUES_THRESHOLD */
10115 unsigned int
10116 case_values_threshold (void)
10118 return CASE_VALUES_THRESHOLD;
10121 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10122 0 otherwise (i.e. if there is no casesi instruction). */
10124 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10125 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
10127 enum machine_mode index_mode = SImode;
10128 int index_bits = GET_MODE_BITSIZE (index_mode);
10129 rtx op1, op2, index;
10130 enum machine_mode op_mode;
10132 if (! HAVE_casesi)
10133 return 0;
10135 /* Convert the index to SImode. */
10136 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10138 enum machine_mode omode = TYPE_MODE (index_type);
10139 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10141 /* We must handle the endpoints in the original mode. */
10142 index_expr = build (MINUS_EXPR, index_type,
10143 index_expr, minval);
10144 minval = integer_zero_node;
10145 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10146 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10147 omode, 1, default_label);
10148 /* Now we can safely truncate. */
10149 index = convert_to_mode (index_mode, index, 0);
10151 else
10153 if (TYPE_MODE (index_type) != index_mode)
10155 index_expr = convert ((*lang_hooks.types.type_for_size)
10156 (index_bits, 0), index_expr);
10157 index_type = TREE_TYPE (index_expr);
10160 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10162 emit_queue ();
10163 index = protect_from_queue (index, 0);
10164 do_pending_stack_adjust ();
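/* Massage INDEX, MINVAL and RANGE into the operands the casesi pattern expects, converting modes and copying to registers as its predicates require. */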
10166 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10167 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10168 (index, op_mode))
10169 index = copy_to_mode_reg (op_mode, index);
10171 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10173 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10174 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10175 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10176 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10177 (op1, op_mode))
10178 op1 = copy_to_mode_reg (op_mode, op1);
10180 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10182 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10183 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10184 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10185 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10186 (op2, op_mode))
10187 op2 = copy_to_mode_reg (op_mode, op2);
10189 emit_jump_insn (gen_casesi (index, op1, op2,
10190 table_label, default_label));
10191 return 1;
10194 /* Attempt to generate a tablejump instruction; same concept. */
10195 #ifndef HAVE_tablejump
10196 #define HAVE_tablejump 0
10197 #define gen_tablejump(x, y) (0)
10198 #endif
10200 /* Subroutine of the next function.
10202 INDEX is the value being switched on, with the lowest value
10203 in the table already subtracted.
10204 MODE is its expected mode (needed if INDEX is constant).
10205 RANGE is the highest valid value of INDEX (the jump table length minus one).
10206 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10208 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10209 index value is out of range. */
10211 static void
10212 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10213 rtx default_label)
10215 rtx temp, vector;
10217 if (INTVAL (range) > cfun->max_jumptable_ents)
10218 cfun->max_jumptable_ents = INTVAL (range);
10220 /* Do an unsigned comparison (in the proper mode) between the index
10221 expression and the value which represents the length of the range.
10222 Since we just finished subtracting the lower bound of the range
10223 from the index expression, this comparison allows us to simultaneously
10224 check that the original index expression value is both greater than
10225 or equal to the minimum value of the range and less than or equal to
10226 the maximum value of the range. */
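/* For example, if the case values run from 5 to 10, RANGE is 5, and the unsigned comparison of the adjusted INDEX against RANGE rejects original values below 5 and above 10 alike. */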
10228 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10229 default_label);
10231 /* If index is in range, it must fit in Pmode.
10232 Convert to Pmode so we can index with it. */
10233 if (mode != Pmode)
10234 index = convert_to_mode (Pmode, index, 1);
10236 /* Don't let a MEM slip thru, because then INDEX that comes
10237 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10238 and break_out_memory_refs will go to work on it and mess it up. */
10239 #ifdef PIC_CASE_VECTOR_ADDRESS
10240 if (flag_pic && GET_CODE (index) != REG)
10241 index = copy_to_mode_reg (Pmode, index);
10242 #endif
10244 /* If flag_force_addr were to affect this address
10245 it could interfere with the tricky assumptions made
10246 about addresses that contain label-refs,
10247 which may be valid only very near the tablejump itself. */
10248 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10249 GET_MODE_SIZE, because this indicates how large insns are. The other
10250 uses should all be Pmode, because they are addresses. This code
10251 could fail if addresses and insns are not the same size. */
10252 index = gen_rtx_PLUS (Pmode,
10253 gen_rtx_MULT (Pmode, index,
10254 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10255 gen_rtx_LABEL_REF (Pmode, table_label));
10256 #ifdef PIC_CASE_VECTOR_ADDRESS
10257 if (flag_pic)
10258 index = PIC_CASE_VECTOR_ADDRESS (index);
10259 else
10260 #endif
10261 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10262 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10263 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10264 RTX_UNCHANGING_P (vector) = 1;
10265 MEM_NOTRAP_P (vector) = 1;
10266 convert_move (temp, vector, 0);
10268 emit_jump_insn (gen_tablejump (temp, table_label));
10270 /* If we are generating PIC code or if the table is PC-relative, the
10271 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10272 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10273 emit_barrier ();
10277 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10278 rtx table_label, rtx default_label)
10280 rtx index;
10282 if (! HAVE_tablejump)
10283 return 0;
10285 index_expr = fold (build (MINUS_EXPR, index_type,
10286 convert (index_type, index_expr),
10287 convert (index_type, minval)));
10288 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10289 emit_queue ();
10290 index = protect_from_queue (index, 0);
10291 do_pending_stack_adjust ();
10293 do_tablejump (index, TYPE_MODE (index_type),
10294 convert_modes (TYPE_MODE (index_type),
10295 TYPE_MODE (TREE_TYPE (range)),
10296 expand_expr (range, NULL_RTX,
10297 VOIDmode, 0),
10298 TREE_UNSIGNED (TREE_TYPE (range))),
10299 table_label, default_label);
10300 return 1;
10303 /* Nonzero if the mode is a valid vector mode for this architecture.
10304 This returns nonzero even if there is no hardware support for the
10305 vector mode, but we can emulate with narrower modes. */
10308 vector_mode_valid_p (enum machine_mode mode)
10310 enum mode_class class = GET_MODE_CLASS (mode);
10311 enum machine_mode innermode;
10313 /* Doh! What's going on? */
10314 if (class != MODE_VECTOR_INT
10315 && class != MODE_VECTOR_FLOAT)
10316 return 0;
10318 /* Hardware support. Woo hoo! */
10319 if (VECTOR_MODE_SUPPORTED_P (mode))
10320 return 1;
10322 innermode = GET_MODE_INNER (mode);
10324 /* We should probably return 1 if requesting V4DI and we have no DI
10325 but do have V2DI, though this case is probably very unlikely. */
10327 /* If we have support for the inner mode, we can safely emulate it.
10328 We may not have V2DI, but we can emulate with a pair of DIs. */
10329 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10332 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10333 static rtx
10334 const_vector_from_tree (tree exp)
10336 rtvec v;
10337 int units, i;
10338 tree link, elt;
10339 enum machine_mode inner, mode;
10341 mode = TYPE_MODE (TREE_TYPE (exp));
10343 if (is_zeros_p (exp))
10344 return CONST0_RTX (mode);
10346 units = GET_MODE_NUNITS (mode);
10347 inner = GET_MODE_INNER (mode);
10349 v = rtvec_alloc (units);
10351 link = TREE_VECTOR_CST_ELTS (exp);
10352 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10354 elt = TREE_VALUE (link);
10356 if (TREE_CODE (elt) == REAL_CST)
10357 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10358 inner);
10359 else
10360 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10361 TREE_INT_CST_HIGH (elt),
10362 inner);
10365 /* Initialize remaining elements to 0. */
10366 for (; i < units; ++i)
10367 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10369 return gen_rtx_raw_CONST_VECTOR (mode, v);
10372 #include "gt-expr.h"