gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "target.h"
52 /* Decide whether a function's arguments should be processed
53 from first to last or from last to first.
55 They should if the stack and args grow in opposite directions, but
56 only if we have push insns. */
58 #ifdef PUSH_ROUNDING
60 #ifndef PUSH_ARGS_REVERSED
61 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #endif
64 #endif
66 #endif
68 #ifndef STACK_PUSH_CODE
69 #ifdef STACK_GROWS_DOWNWARD
70 #define STACK_PUSH_CODE PRE_DEC
71 #else
72 #define STACK_PUSH_CODE PRE_INC
73 #endif
74 #endif
76 /* Convert defined/undefined to boolean. */
77 #ifdef TARGET_MEM_FUNCTIONS
78 #undef TARGET_MEM_FUNCTIONS
79 #define TARGET_MEM_FUNCTIONS 1
80 #else
81 #define TARGET_MEM_FUNCTIONS 0
82 #endif
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
91 int cse_not_expected;
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
94 tree placeholder_list = 0;
96 /* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98 struct move_by_pieces
100 rtx to;
101 rtx to_addr;
102 int autinc_to;
103 int explicit_inc_to;
104 rtx from;
105 rtx from_addr;
106 int autinc_from;
107 int explicit_inc_from;
108 unsigned HOST_WIDE_INT len;
109 HOST_WIDE_INT offset;
110 int reverse;
113 /* This structure is used by store_by_pieces to describe the clear to
114 be performed. */
116 struct store_by_pieces
118 rtx to;
119 rtx to_addr;
120 int autinc_to;
121 int explicit_inc_to;
122 unsigned HOST_WIDE_INT len;
123 HOST_WIDE_INT offset;
124 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
125 void *constfundata;
126 int reverse;
129 static rtx enqueue_insn (rtx, rtx);
130 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
131 unsigned int);
132 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
133 struct move_by_pieces *);
134 static bool block_move_libcall_safe_for_call_parm (void);
135 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
136 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
137 static tree emit_block_move_libcall_fn (int);
138 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
139 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
140 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
141 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
142 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
143 struct store_by_pieces *);
144 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
145 static rtx clear_storage_via_libcall (rtx, rtx);
146 static tree clear_storage_libcall_fn (int);
147 static rtx compress_float_constant (rtx, rtx);
148 static rtx get_subtarget (rtx);
149 static int is_zeros_p (tree);
150 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
151 HOST_WIDE_INT, enum machine_mode,
152 tree, tree, int, int);
153 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
154 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
155 tree, enum machine_mode, int, tree, int);
156 static rtx var_rtx (tree);
158 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
159 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
161 static int is_aligning_offset (tree, tree);
162 static rtx expand_increment (tree, int, int);
163 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
164 enum expand_modifier);
165 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
166 #ifdef PUSH_ROUNDING
167 static void emit_single_push_insn (enum machine_mode, rtx, tree);
168 #endif
169 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
170 static rtx const_vector_from_tree (tree);
172 /* Record for each mode whether we can move a register directly to or
173 from an object of that mode in memory. If we can't, we won't try
174 to use that mode directly when accessing a field of that mode. */
176 static char direct_load[NUM_MACHINE_MODES];
177 static char direct_store[NUM_MACHINE_MODES];
179 /* Record for each mode whether we can float-extend from memory. */
181 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
183 /* This macro is used to determine whether move_by_pieces should be called
184 to perform a structure copy. */
185 #ifndef MOVE_BY_PIECES_P
186 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
188 #endif
190 /* This macro is used to determine whether clear_by_pieces should be
191 called to clear storage. */
192 #ifndef CLEAR_BY_PIECES_P
193 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
195 #endif
197 /* This macro is used to determine whether store_by_pieces should be
198 called to "memset" storage with byte values other than zero, or
199 to "memcpy" storage when the source is a constant string. */
200 #ifndef STORE_BY_PIECES_P
201 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
202 #endif
204 /* This array records the insn_code of insns to perform block moves. */
205 enum insn_code movstr_optab[NUM_MACHINE_MODES];
207 /* This array records the insn_code of insns to perform block clears. */
208 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
210 /* These arrays record the insn_code of two different kinds of insns
211 to perform block compares. */
212 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
213 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
215 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
216 struct file_stack *expr_wfl_stack;
218 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
220 #ifndef SLOW_UNALIGNED_ACCESS
221 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
222 #endif
224 /* This is run once per compilation to set up which modes can be used
225 directly in memory and to initialize the block move optab. */
227 void
228 init_expr_once (void)
230 rtx insn, pat;
231 enum machine_mode mode;
232 int num_clobbers;
233 rtx mem, mem1;
234 rtx reg;
236 /* Try indexing by frame ptr and try by stack ptr.
237 It is known that on the Convex the stack ptr isn't a valid index.
238 With luck, one or the other is valid on any machine. */
239 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
240 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
242 /* A scratch register we can modify in-place below to avoid
243 useless RTL allocations. */
244 reg = gen_rtx_REG (VOIDmode, -1);
246 insn = rtx_alloc (INSN);
247 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
248 PATTERN (insn) = pat;
250 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
251 mode = (enum machine_mode) ((int) mode + 1))
253 int regno;
255 direct_load[(int) mode] = direct_store[(int) mode] = 0;
256 PUT_MODE (mem, mode);
257 PUT_MODE (mem1, mode);
258 PUT_MODE (reg, mode);
260 /* See if there is some register that can be used in this mode and
261 directly loaded or stored from memory. */
263 if (mode != VOIDmode && mode != BLKmode)
264 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
265 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
266 regno++)
268 if (! HARD_REGNO_MODE_OK (regno, mode))
269 continue;
271 REGNO (reg) = regno;
273 SET_SRC (pat) = mem;
274 SET_DEST (pat) = reg;
275 if (recog (pat, insn, &num_clobbers) >= 0)
276 direct_load[(int) mode] = 1;
278 SET_SRC (pat) = mem1;
279 SET_DEST (pat) = reg;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_load[(int) mode] = 1;
283 SET_SRC (pat) = reg;
284 SET_DEST (pat) = mem;
285 if (recog (pat, insn, &num_clobbers) >= 0)
286 direct_store[(int) mode] = 1;
288 SET_SRC (pat) = reg;
289 SET_DEST (pat) = mem1;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_store[(int) mode] = 1;
295 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
297 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
298 mode = GET_MODE_WIDER_MODE (mode))
300 enum machine_mode srcmode;
301 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
302 srcmode = GET_MODE_WIDER_MODE (srcmode))
304 enum insn_code ic;
306 ic = can_extend_p (mode, srcmode, 0);
307 if (ic == CODE_FOR_nothing)
308 continue;
310 PUT_MODE (mem, srcmode);
312 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
313 float_extend_from_mem[mode][srcmode] = true;
318 /* This is run at the start of compiling a function. */
320 void
321 init_expr (void)
323 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
326 /* Small sanity check that the queue is empty at the end of a function. */
328 void
329 finish_expr_for_function (void)
331 if (pending_chain)
332 abort ();
335 /* Manage the queue of increment instructions to be output
336 for POSTINCREMENT_EXPR expressions, etc. */
338 /* Queue up to increment (or change) VAR later. BODY says how:
339 BODY should be the same thing you would pass to emit_insn
340 to increment right away. It will go to emit_insn later on.
342 The value is a QUEUED expression to be used in place of VAR
343 where you want to guarantee the pre-incrementation value of VAR. */
345 static rtx
346 enqueue_insn (rtx var, rtx body)
348 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
349 body, pending_chain);
350 return pending_chain;
353 /* Use protect_from_queue to convert a QUEUED expression
354 into something that you can put immediately into an instruction.
355 If the queued incrementation has not happened yet,
356 protect_from_queue returns the variable itself.
357 If the incrementation has happened, protect_from_queue returns a temp
358 that contains a copy of the old value of the variable.
360 Any time an rtx which might possibly be a QUEUED is to be put
361 into an instruction, it must be passed through protect_from_queue first.
362 QUEUED expressions are not meaningful in instructions.
364 Do not pass a value through protect_from_queue and then hold
365 on to it for a while before putting it in an instruction!
366 If the queue is flushed in between, incorrect code will result. */
368 rtx
369 protect_from_queue (rtx x, int modify)
371 RTX_CODE code = GET_CODE (x);
373 #if 0 /* A QUEUED can hang around after the queue is forced out. */
374 /* Shortcut for most common case. */
375 if (pending_chain == 0)
376 return x;
377 #endif
379 if (code != QUEUED)
381 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
382 use of autoincrement. Make a copy of the contents of the memory
383 location rather than a copy of the address, but not if the value is
384 of mode BLKmode. Don't modify X in place since it might be
385 shared. */
386 if (code == MEM && GET_MODE (x) != BLKmode
387 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
389 rtx y = XEXP (x, 0);
390 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
392 if (QUEUED_INSN (y))
394 rtx temp = gen_reg_rtx (GET_MODE (x));
396 emit_insn_before (gen_move_insn (temp, new),
397 QUEUED_INSN (y));
398 return temp;
401 /* Copy the address into a pseudo, so that the returned value
402 remains correct across calls to emit_queue. */
403 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
406 /* Otherwise, recursively protect the subexpressions of all
407 the kinds of rtx's that can contain a QUEUED. */
408 if (code == MEM)
410 rtx tem = protect_from_queue (XEXP (x, 0), 0);
411 if (tem != XEXP (x, 0))
413 x = copy_rtx (x);
414 XEXP (x, 0) = tem;
417 else if (code == PLUS || code == MULT)
419 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
420 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
421 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
423 x = copy_rtx (x);
424 XEXP (x, 0) = new0;
425 XEXP (x, 1) = new1;
428 return x;
430 /* If the increment has not happened, use the variable itself. Copy it
431 into a new pseudo so that the value remains correct across calls to
432 emit_queue. */
433 if (QUEUED_INSN (x) == 0)
434 return copy_to_reg (QUEUED_VAR (x));
435 /* If the increment has happened and a pre-increment copy exists,
436 use that copy. */
437 if (QUEUED_COPY (x) != 0)
438 return QUEUED_COPY (x);
439 /* The increment has happened but we haven't set up a pre-increment copy.
440 Set one up now, and use it. */
441 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
442 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
443 QUEUED_INSN (x));
444 return QUEUED_COPY (x);
447 /* Return nonzero if X contains a QUEUED expression:
448 if it contains anything that will be altered by a queued increment.
449 We handle only combinations of MEM, PLUS, MINUS and MULT operators
450 since memory addresses generally contain only those. */
452 int
453 queued_subexp_p (rtx x)
455 enum rtx_code code = GET_CODE (x);
456 switch (code)
458 case QUEUED:
459 return 1;
460 case MEM:
461 return queued_subexp_p (XEXP (x, 0));
462 case MULT:
463 case PLUS:
464 case MINUS:
465 return (queued_subexp_p (XEXP (x, 0))
466 || queued_subexp_p (XEXP (x, 1)));
467 default:
468 return 0;
472 /* Perform all the pending incrementations. */
474 void
475 emit_queue (void)
477 rtx p;
478 while ((p = pending_chain))
480 rtx body = QUEUED_BODY (p);
482 switch (GET_CODE (body))
484 case INSN:
485 case JUMP_INSN:
486 case CALL_INSN:
487 case CODE_LABEL:
488 case BARRIER:
489 case NOTE:
490 QUEUED_INSN (p) = body;
491 emit_insn (body);
492 break;
494 #ifdef ENABLE_CHECKING
495 case SEQUENCE:
496 abort ();
497 break;
498 #endif
500 default:
501 QUEUED_INSN (p) = emit_insn (body);
502 break;
505 pending_chain = QUEUED_NEXT (p);
509 /* Copy data from FROM to TO, where the machine modes are not the same.
510 Both modes may be integer, or both may be floating.
511 UNSIGNEDP should be nonzero if FROM is an unsigned type.
512 This causes zero-extension instead of sign-extension. */
514 void
515 convert_move (rtx to, rtx from, int unsignedp)
517 enum machine_mode to_mode = GET_MODE (to);
518 enum machine_mode from_mode = GET_MODE (from);
519 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
520 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
521 enum insn_code code;
522 rtx libcall;
524 /* rtx code for making an equivalent value. */
525 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
526 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
528 to = protect_from_queue (to, 1);
529 from = protect_from_queue (from, 0);
531 if (to_real != from_real)
532 abort ();
534 /* If FROM is a SUBREG that indicates that we have already done at least
535 the required extension, strip it. We don't handle such SUBREGs as
536 TO here. */
538 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
539 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
540 >= GET_MODE_SIZE (to_mode))
541 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
542 from = gen_lowpart (to_mode, from), from_mode = to_mode;
544 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
545 abort ();
547 if (to_mode == from_mode
548 || (from_mode == VOIDmode && CONSTANT_P (from)))
550 emit_move_insn (to, from);
551 return;
554 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
556 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
557 abort ();
559 if (VECTOR_MODE_P (to_mode))
560 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
561 else
562 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
564 emit_move_insn (to, from);
565 return;
568 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
570 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
571 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
572 return;
575 if (to_real)
577 rtx value, insns;
578 convert_optab tab;
580 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
581 tab = sext_optab;
582 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
583 tab = trunc_optab;
584 else
585 abort ();
587 /* Try converting directly if the insn is supported. */
589 code = tab->handlers[to_mode][from_mode].insn_code;
590 if (code != CODE_FOR_nothing)
592 emit_unop_insn (code, to, from,
593 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
594 return;
597 /* Otherwise use a libcall. */
598 libcall = tab->handlers[to_mode][from_mode].libfunc;
600 if (!libcall)
601 /* This conversion is not implemented yet. */
602 abort ();
604 start_sequence ();
605 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
606 1, from, from_mode);
607 insns = get_insns ();
608 end_sequence ();
609 emit_libcall_block (insns, to, value,
610 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
611 from)
612 : gen_rtx_FLOAT_EXTEND (to_mode, from));
613 return;
616 /* Handle pointer conversion. */ /* SPEE 900220. */
617 /* Targets are expected to provide conversion insns between PxImode and
618 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
619 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
621 enum machine_mode full_mode
622 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
624 if (trunc_optab->handlers[to_mode][full_mode].insn_code
625 == CODE_FOR_nothing)
626 abort ();
628 if (full_mode != from_mode)
629 from = convert_to_mode (full_mode, from, unsignedp);
630 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
631 to, from, UNKNOWN);
632 return;
634 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
636 enum machine_mode full_mode
637 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
639 if (sext_optab->handlers[full_mode][from_mode].insn_code
640 == CODE_FOR_nothing)
641 abort ();
643 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
644 to, from, UNKNOWN);
645 if (to_mode == full_mode)
646 return;
648 /* else proceed to integer conversions below. */
649 from_mode = full_mode;
652 /* Now both modes are integers. */
654 /* Handle expanding beyond a word. */
655 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
656 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
658 rtx insns;
659 rtx lowpart;
660 rtx fill_value;
661 rtx lowfrom;
662 int i;
663 enum machine_mode lowpart_mode;
664 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
666 /* Try converting directly if the insn is supported. */
667 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
668 != CODE_FOR_nothing)
670 /* If FROM is a SUBREG, put it into a register. Do this
671 so that we always generate the same set of insns for
672 better cse'ing; if an intermediate assignment occurred,
673 we won't be doing the operation directly on the SUBREG. */
674 if (optimize > 0 && GET_CODE (from) == SUBREG)
675 from = force_reg (from_mode, from);
676 emit_unop_insn (code, to, from, equiv_code);
677 return;
679 /* Next, try converting via full word. */
680 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
681 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
682 != CODE_FOR_nothing))
684 if (GET_CODE (to) == REG)
686 if (reg_overlap_mentioned_p (to, from))
687 from = force_reg (from_mode, from);
688 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
690 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
691 emit_unop_insn (code, to,
692 gen_lowpart (word_mode, to), equiv_code);
693 return;
696 /* No special multiword conversion insn; do it by hand. */
697 start_sequence ();
699 /* Since we will turn this into a no conflict block, we must ensure
700 that the source does not overlap the target. */
702 if (reg_overlap_mentioned_p (to, from))
703 from = force_reg (from_mode, from);
705 /* Get a copy of FROM widened to a word, if necessary. */
706 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
707 lowpart_mode = word_mode;
708 else
709 lowpart_mode = from_mode;
711 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
713 lowpart = gen_lowpart (lowpart_mode, to);
714 emit_move_insn (lowpart, lowfrom);
716 /* Compute the value to put in each remaining word. */
717 if (unsignedp)
718 fill_value = const0_rtx;
719 else
721 #ifdef HAVE_slt
722 if (HAVE_slt
723 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
724 && STORE_FLAG_VALUE == -1)
726 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
727 lowpart_mode, 0);
728 fill_value = gen_reg_rtx (word_mode);
729 emit_insn (gen_slt (fill_value));
731 else
732 #endif
734 fill_value
735 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
736 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
737 NULL_RTX, 0);
738 fill_value = convert_to_mode (word_mode, fill_value, 1);
742 /* Fill the remaining words. */
743 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
745 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
746 rtx subword = operand_subword (to, index, 1, to_mode);
748 if (subword == 0)
749 abort ();
751 if (fill_value != subword)
752 emit_move_insn (subword, fill_value);
755 insns = get_insns ();
756 end_sequence ();
758 emit_no_conflict_block (insns, to, from, NULL_RTX,
759 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
760 return;
763 /* Truncating multi-word to a word or less. */
764 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
765 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
767 if (!((GET_CODE (from) == MEM
768 && ! MEM_VOLATILE_P (from)
769 && direct_load[(int) to_mode]
770 && ! mode_dependent_address_p (XEXP (from, 0)))
771 || GET_CODE (from) == REG
772 || GET_CODE (from) == SUBREG))
773 from = force_reg (from_mode, from);
774 convert_move (to, gen_lowpart (word_mode, from), 0);
775 return;
778 /* Now follow all the conversions between integers
779 no more than a word long. */
781 /* For truncation, usually we can just refer to FROM in a narrower mode. */
782 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
783 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
784 GET_MODE_BITSIZE (from_mode)))
786 if (!((GET_CODE (from) == MEM
787 && ! MEM_VOLATILE_P (from)
788 && direct_load[(int) to_mode]
789 && ! mode_dependent_address_p (XEXP (from, 0)))
790 || GET_CODE (from) == REG
791 || GET_CODE (from) == SUBREG))
792 from = force_reg (from_mode, from);
793 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
794 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
795 from = copy_to_reg (from);
796 emit_move_insn (to, gen_lowpart (to_mode, from));
797 return;
800 /* Handle extension. */
801 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
803 /* Convert directly if that works. */
804 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
805 != CODE_FOR_nothing)
807 if (flag_force_mem)
808 from = force_not_mem (from);
810 emit_unop_insn (code, to, from, equiv_code);
811 return;
813 else
815 enum machine_mode intermediate;
816 rtx tmp;
817 tree shift_amount;
819 /* Search for a mode to convert via. */
820 for (intermediate = from_mode; intermediate != VOIDmode;
821 intermediate = GET_MODE_WIDER_MODE (intermediate))
822 if (((can_extend_p (to_mode, intermediate, unsignedp)
823 != CODE_FOR_nothing)
824 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
825 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
826 GET_MODE_BITSIZE (intermediate))))
827 && (can_extend_p (intermediate, from_mode, unsignedp)
828 != CODE_FOR_nothing))
830 convert_move (to, convert_to_mode (intermediate, from,
831 unsignedp), unsignedp);
832 return;
835 /* No suitable intermediate mode.
836 Generate what we need with shifts. */
837 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
838 - GET_MODE_BITSIZE (from_mode), 0);
839 from = gen_lowpart (to_mode, force_reg (from_mode, from));
840 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
841 to, unsignedp);
842 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
843 to, unsignedp);
844 if (tmp != to)
845 emit_move_insn (to, tmp);
846 return;
850 /* Support special truncate insns for certain modes. */
851 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
853 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
854 to, from, UNKNOWN);
855 return;
858 /* Handle truncation of volatile memrefs, and so on;
859 the things that couldn't be truncated directly,
860 and for which there was no special instruction.
862 ??? Code above formerly short-circuited this, for most integer
863 mode pairs, with a force_reg in from_mode followed by a recursive
864 call to this routine. Appears always to have been wrong. */
865 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
867 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
868 emit_move_insn (to, temp);
869 return;
872 /* Mode combination is not recognized. */
873 abort ();
876 /* Return an rtx for a value that would result
877 from converting X to mode MODE.
878 Both X and MODE may be floating, or both integer.
879 UNSIGNEDP is nonzero if X is an unsigned value.
880 This can be done by referring to a part of X in place
881 or by copying to a new temporary with conversion.
883 This function *must not* call protect_from_queue
884 except when putting X into an insn (in which case convert_move does it). */
886 rtx
887 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
889 return convert_modes (mode, VOIDmode, x, unsignedp);
892 /* Return an rtx for a value that would result
893 from converting X from mode OLDMODE to mode MODE.
894 Both modes may be floating, or both integer.
895 UNSIGNEDP is nonzero if X is an unsigned value.
897 This can be done by referring to a part of X in place
898 or by copying to a new temporary with conversion.
900 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
902 This function *must not* call protect_from_queue
903 except when putting X into an insn (in which case convert_move does it). */
905 rtx
906 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
908 rtx temp;
910 /* If FROM is a SUBREG that indicates that we have already done at least
911 the required extension, strip it. */
913 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
914 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
915 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
916 x = gen_lowpart (mode, x);
918 if (GET_MODE (x) != VOIDmode)
919 oldmode = GET_MODE (x);
921 if (mode == oldmode)
922 return x;
924 /* There is one case that we must handle specially: If we are converting
925 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
926 we are to interpret the constant as unsigned, gen_lowpart will do
927 the wrong thing if the constant appears negative. What we want to do is
928 make the high-order word of the constant zero, not all ones. */
930 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
931 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
932 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
934 HOST_WIDE_INT val = INTVAL (x);
936 if (oldmode != VOIDmode
937 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
939 int width = GET_MODE_BITSIZE (oldmode);
941 /* We need to zero extend VAL. */
942 val &= ((HOST_WIDE_INT) 1 << width) - 1;
945 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
948 /* We can do this with a gen_lowpart if both desired and current modes
949 are integer, and this is either a constant integer, a register, or a
950 non-volatile MEM. Except for the constant case where MODE is no
951 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
953 if ((GET_CODE (x) == CONST_INT
954 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
955 || (GET_MODE_CLASS (mode) == MODE_INT
956 && GET_MODE_CLASS (oldmode) == MODE_INT
957 && (GET_CODE (x) == CONST_DOUBLE
958 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
959 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
960 && direct_load[(int) mode])
961 || (GET_CODE (x) == REG
962 && (! HARD_REGISTER_P (x)
963 || HARD_REGNO_MODE_OK (REGNO (x), mode))
964 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
965 GET_MODE_BITSIZE (GET_MODE (x)))))))))
967 /* ?? If we don't know OLDMODE, we have to assume here that
968 X does not need sign- or zero-extension. This may not be
969 the case, but it's the best we can do. */
970 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
971 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
973 HOST_WIDE_INT val = INTVAL (x);
974 int width = GET_MODE_BITSIZE (oldmode);
976 /* We must sign or zero-extend in this case. Start by
977 zero-extending, then sign extend if we need to. */
978 val &= ((HOST_WIDE_INT) 1 << width) - 1;
979 if (! unsignedp
980 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
981 val |= (HOST_WIDE_INT) (-1) << width;
983 return gen_int_mode (val, mode);
986 return gen_lowpart (mode, x);
989 /* Converting an integer constant into a mode is always equivalent to a
990 subreg operation. */
991 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
993 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
994 abort ();
995 return simplify_gen_subreg (mode, x, oldmode, 0);
998 temp = gen_reg_rtx (mode);
999 convert_move (temp, x, unsignedp);
1000 return temp;
1003 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1004 store efficiently. Due to internal GCC limitations, this is
1005 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1006 for an immediate constant. */
1008 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1010 /* Determine whether the LEN bytes can be moved by using several move
1011 instructions. Return nonzero if a call to move_by_pieces should
1012 succeed. */
1014 int
1015 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1016 unsigned int align ATTRIBUTE_UNUSED)
1018 return MOVE_BY_PIECES_P (len, align);
1021 /* Generate several move instructions to copy LEN bytes from block FROM to
1022 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1023 and TO through protect_from_queue before calling.
1025 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1026 used to push FROM to the stack.
1028 ALIGN is maximum stack alignment we can assume.
1030 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1031 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
1032 stpcpy. */
1034 rtx
1035 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1036 unsigned int align, int endp)
1038 struct move_by_pieces data;
1039 rtx to_addr, from_addr = XEXP (from, 0);
1040 unsigned int max_size = MOVE_MAX_PIECES + 1;
1041 enum machine_mode mode = VOIDmode, tmode;
1042 enum insn_code icode;
1044 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1046 data.offset = 0;
1047 data.from_addr = from_addr;
1048 if (to)
1050 to_addr = XEXP (to, 0);
1051 data.to = to;
1052 data.autinc_to
1053 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1054 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1055 data.reverse
1056 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1058 else
1060 to_addr = NULL_RTX;
1061 data.to = NULL_RTX;
1062 data.autinc_to = 1;
1063 #ifdef STACK_GROWS_DOWNWARD
1064 data.reverse = 1;
1065 #else
1066 data.reverse = 0;
1067 #endif
1069 data.to_addr = to_addr;
1070 data.from = from;
1071 data.autinc_from
1072 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1073 || GET_CODE (from_addr) == POST_INC
1074 || GET_CODE (from_addr) == POST_DEC);
1076 data.explicit_inc_from = 0;
1077 data.explicit_inc_to = 0;
1078 if (data.reverse) data.offset = len;
1079 data.len = len;
1081 /* If copying requires more than two move insns,
1082 copy addresses to registers (to make displacements shorter)
1083 and use post-increment if available. */
1084 if (!(data.autinc_from && data.autinc_to)
1085 && move_by_pieces_ninsns (len, align) > 2)
1087 /* Find the mode of the largest move... */
1088 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1089 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1090 if (GET_MODE_SIZE (tmode) < max_size)
1091 mode = tmode;
1093 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1095 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1096 data.autinc_from = 1;
1097 data.explicit_inc_from = -1;
1099 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1101 data.from_addr = copy_addr_to_reg (from_addr);
1102 data.autinc_from = 1;
1103 data.explicit_inc_from = 1;
1105 if (!data.autinc_from && CONSTANT_P (from_addr))
1106 data.from_addr = copy_addr_to_reg (from_addr);
1107 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1109 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1110 data.autinc_to = 1;
1111 data.explicit_inc_to = -1;
1113 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1115 data.to_addr = copy_addr_to_reg (to_addr);
1116 data.autinc_to = 1;
1117 data.explicit_inc_to = 1;
1119 if (!data.autinc_to && CONSTANT_P (to_addr))
1120 data.to_addr = copy_addr_to_reg (to_addr);
1123 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1124 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1125 align = MOVE_MAX * BITS_PER_UNIT;
1127 /* First move what we can in the largest integer mode, then go to
1128 successively smaller modes. */
1130 while (max_size > 1)
1132 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1133 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1134 if (GET_MODE_SIZE (tmode) < max_size)
1135 mode = tmode;
1137 if (mode == VOIDmode)
1138 break;
1140 icode = mov_optab->handlers[(int) mode].insn_code;
1141 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1142 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1144 max_size = GET_MODE_SIZE (mode);
1147 /* The code above should have handled everything. */
1148 if (data.len > 0)
1149 abort ();
1151 if (endp)
1153 rtx to1;
1155 if (data.reverse)
1156 abort ();
1157 if (data.autinc_to)
1159 if (endp == 2)
1161 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1162 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1163 else
1164 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1165 -1));
1167 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1168 data.offset);
1170 else
1172 if (endp == 2)
1173 --data.offset;
1174 to1 = adjust_address (data.to, QImode, data.offset);
1176 return to1;
1178 else
1179 return data.to;
1182 /* Return number of insns required to move L bytes by pieces.
1183 ALIGN (in bits) is maximum alignment we can assume. */
1185 static unsigned HOST_WIDE_INT
1186 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1188 unsigned HOST_WIDE_INT n_insns = 0;
1189 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1191 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1192 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1193 align = MOVE_MAX * BITS_PER_UNIT;
1195 while (max_size > 1)
1197 enum machine_mode mode = VOIDmode, tmode;
1198 enum insn_code icode;
1200 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1201 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1202 if (GET_MODE_SIZE (tmode) < max_size)
1203 mode = tmode;
1205 if (mode == VOIDmode)
1206 break;
1208 icode = mov_optab->handlers[(int) mode].insn_code;
1209 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1210 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1212 max_size = GET_MODE_SIZE (mode);
1215 if (l)
1216 abort ();
1217 return n_insns;
1220 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1221 with move instructions for mode MODE. GENFUN is the gen_... function
1222 to make a move insn for that mode. DATA has all the other info. */
1224 static void
1225 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1226 struct move_by_pieces *data)
1228 unsigned int size = GET_MODE_SIZE (mode);
1229 rtx to1 = NULL_RTX, from1;
1231 while (data->len >= size)
1233 if (data->reverse)
1234 data->offset -= size;
1236 if (data->to)
1238 if (data->autinc_to)
1239 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1240 data->offset);
1241 else
1242 to1 = adjust_address (data->to, mode, data->offset);
1245 if (data->autinc_from)
1246 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1247 data->offset);
1248 else
1249 from1 = adjust_address (data->from, mode, data->offset);
1251 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1252 emit_insn (gen_add2_insn (data->to_addr,
1253 GEN_INT (-(HOST_WIDE_INT)size)));
1254 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1255 emit_insn (gen_add2_insn (data->from_addr,
1256 GEN_INT (-(HOST_WIDE_INT)size)));
1258 if (data->to)
1259 emit_insn ((*genfun) (to1, from1));
1260 else
1262 #ifdef PUSH_ROUNDING
1263 emit_single_push_insn (mode, from1, NULL);
1264 #else
1265 abort ();
1266 #endif
1269 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1270 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1271 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1272 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1274 if (! data->reverse)
1275 data->offset += size;
1277 data->len -= size;
1281 /* Emit code to move a block Y to a block X. This may be done with
1282 string-move instructions, with multiple scalar move instructions,
1283 or with a library call.
1285 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1286 SIZE is an rtx that says how long they are.
1287 ALIGN is the maximum alignment we can assume they have.
1288 METHOD describes what kind of copy this is, and what mechanisms may be used.
1290 Return the address of the new block, if memcpy is called and returns it,
1291 0 otherwise. */
1293 rtx
1294 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1296 bool may_use_call;
1297 rtx retval = 0;
1298 unsigned int align;
1300 switch (method)
1302 case BLOCK_OP_NORMAL:
1303 may_use_call = true;
1304 break;
1306 case BLOCK_OP_CALL_PARM:
1307 may_use_call = block_move_libcall_safe_for_call_parm ();
1309 /* Make inhibit_defer_pop nonzero around the library call
1310 to force it to pop the arguments right away. */
1311 NO_DEFER_POP;
1312 break;
1314 case BLOCK_OP_NO_LIBCALL:
1315 may_use_call = false;
1316 break;
1318 default:
1319 abort ();
1322 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1324 if (GET_MODE (x) != BLKmode)
1325 abort ();
1326 if (GET_MODE (y) != BLKmode)
1327 abort ();
1329 x = protect_from_queue (x, 1);
1330 y = protect_from_queue (y, 0);
1331 size = protect_from_queue (size, 0);
1333 if (GET_CODE (x) != MEM)
1334 abort ();
1335 if (GET_CODE (y) != MEM)
1336 abort ();
1337 if (size == 0)
1338 abort ();
1340 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1341 can be incorrect is coming from __builtin_memcpy. */
1342 if (GET_CODE (size) == CONST_INT)
1344 if (INTVAL (size) == 0)
1345 return 0;
1347 x = shallow_copy_rtx (x);
1348 y = shallow_copy_rtx (y);
1349 set_mem_size (x, size);
1350 set_mem_size (y, size);
1353 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1354 move_by_pieces (x, y, INTVAL (size), align, 0);
1355 else if (emit_block_move_via_movstr (x, y, size, align))
1357 else if (may_use_call)
1358 retval = emit_block_move_via_libcall (x, y, size);
1359 else
1360 emit_block_move_via_loop (x, y, size, align);
1362 if (method == BLOCK_OP_CALL_PARM)
1363 OK_DEFER_POP;
1365 return retval;
1368 /* A subroutine of emit_block_move. Returns true if calling the
1369 block move libcall will not clobber any parameters which may have
1370 already been placed on the stack. */
1372 static bool
1373 block_move_libcall_safe_for_call_parm (void)
1375 /* If arguments are pushed on the stack, then they're safe. */
1376 if (PUSH_ARGS)
1377 return true;
1379 /* If registers go on the stack anyway, any argument is sure to clobber
1380 an outgoing argument. */
1381 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1383 tree fn = emit_block_move_libcall_fn (false);
1384 (void) fn;
1385 if (REG_PARM_STACK_SPACE (fn) != 0)
1386 return false;
1388 #endif
1390 /* If any argument goes in memory, then it might clobber an outgoing
1391 argument. */
1393 CUMULATIVE_ARGS args_so_far;
1394 tree fn, arg;
1396 fn = emit_block_move_libcall_fn (false);
1397 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1399 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1400 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1402 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1403 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1404 if (!tmp || !REG_P (tmp))
1405 return false;
1406 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1407 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1408 NULL_TREE, 1))
1409 return false;
1410 #endif
1411 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1414 return true;
1417 /* A subroutine of emit_block_move. Expand a movstr pattern;
1418 return true if successful. */
1420 static bool
1421 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1423 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1424 int save_volatile_ok = volatile_ok;
1425 enum machine_mode mode;
1427 /* Since this is a move insn, we don't care about volatility. */
1428 volatile_ok = 1;
1430 /* Try the most limited insn first, because there's no point
1431 including more than one in the machine description unless
1432 the more limited one has some advantage. */
1434 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1435 mode = GET_MODE_WIDER_MODE (mode))
1437 enum insn_code code = movstr_optab[(int) mode];
1438 insn_operand_predicate_fn pred;
1440 if (code != CODE_FOR_nothing
1441 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1442 here because if SIZE is less than the mode mask, as it is
1443 returned by the macro, it will definitely be less than the
1444 actual mode mask. */
1445 && ((GET_CODE (size) == CONST_INT
1446 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1447 <= (GET_MODE_MASK (mode) >> 1)))
1448 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1449 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1450 || (*pred) (x, BLKmode))
1451 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1452 || (*pred) (y, BLKmode))
1453 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1454 || (*pred) (opalign, VOIDmode)))
1456 rtx op2;
1457 rtx last = get_last_insn ();
1458 rtx pat;
1460 op2 = convert_to_mode (mode, size, 1);
1461 pred = insn_data[(int) code].operand[2].predicate;
1462 if (pred != 0 && ! (*pred) (op2, mode))
1463 op2 = copy_to_mode_reg (mode, op2);
1465 /* ??? When called via emit_block_move_for_call, it'd be
1466 nice if there were some way to inform the backend, so
1467 that it doesn't fail the expansion because it thinks
1468 emitting the libcall would be more efficient. */
1470 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1471 if (pat)
1473 emit_insn (pat);
1474 volatile_ok = save_volatile_ok;
1475 return true;
1477 else
1478 delete_insns_since (last);
1482 volatile_ok = save_volatile_ok;
1483 return false;
1486 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1487 Return the return value from memcpy, 0 otherwise. */
1489 static rtx
1490 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1492 rtx dst_addr, src_addr;
1493 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1494 enum machine_mode size_mode;
1495 rtx retval;
1497 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1499 It is unsafe to save the value generated by protect_from_queue and reuse
1500 it later. Consider what happens if emit_queue is called before the
1501 return value from protect_from_queue is used.
1503 Expansion of the CALL_EXPR below will call emit_queue before we are
1504 finished emitting RTL for argument setup. So if we are not careful we
1505 could get the wrong value for an argument.
1507 To avoid this problem we go ahead and emit code to copy the addresses of
1508 DST and SRC and SIZE into new pseudos. We can then place those new
1509 pseudos into an RTL_EXPR and use them later, even after a call to
1510 emit_queue.
1512 Note this is not strictly needed for library calls since they do not call
1513 emit_queue before loading their arguments. However, we may need to have
1514 library calls call emit_queue in the future since failing to do so could
1515 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1516 arguments in registers. */
1518 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1519 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1521 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1522 src_addr = convert_memory_address (ptr_mode, src_addr);
1524 dst_tree = make_tree (ptr_type_node, dst_addr);
1525 src_tree = make_tree (ptr_type_node, src_addr);
1527 if (TARGET_MEM_FUNCTIONS)
1528 size_mode = TYPE_MODE (sizetype);
1529 else
1530 size_mode = TYPE_MODE (unsigned_type_node);
1532 size = convert_to_mode (size_mode, size, 1);
1533 size = copy_to_mode_reg (size_mode, size);
1535 /* It is incorrect to use the libcall calling conventions to call
1536 memcpy in this context. This could be a user call to memcpy and
1537 the user may wish to examine the return value from memcpy. For
1538 targets where libcalls and normal calls have different conventions
1539 for returning pointers, we could end up generating incorrect code.
1541 For convenience, we generate the call to bcopy this way as well. */
1543 if (TARGET_MEM_FUNCTIONS)
1544 size_tree = make_tree (sizetype, size);
1545 else
1546 size_tree = make_tree (unsigned_type_node, size);
1548 fn = emit_block_move_libcall_fn (true);
1549 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1550 if (TARGET_MEM_FUNCTIONS)
1552 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1553 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1555 else
1557 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1558 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1561 /* Now we have to build up the CALL_EXPR itself. */
1562 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1563 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1564 call_expr, arg_list, NULL_TREE);
1566 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1568 /* If we are initializing a readonly value, show the above call clobbered
1569 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1570 the delay slot scheduler might overlook conflicts and take nasty
1571 decisions. */
1572 if (RTX_UNCHANGING_P (dst))
1573 add_function_usage_to
1574 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1575 gen_rtx_CLOBBER (VOIDmode, dst),
1576 NULL_RTX));
1578 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1581 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1582 for the function we use for block copies. The first time FOR_CALL
1583 is true, we call assemble_external. */
1585 static GTY(()) tree block_move_fn;
1587 void
1588 init_block_move_fn (const char *asmspec)
1590 if (!block_move_fn)
1592 tree args, fn;
1594 if (TARGET_MEM_FUNCTIONS)
1596 fn = get_identifier ("memcpy");
1597 args = build_function_type_list (ptr_type_node, ptr_type_node,
1598 const_ptr_type_node, sizetype,
1599 NULL_TREE);
1601 else
1603 fn = get_identifier ("bcopy");
1604 args = build_function_type_list (void_type_node, const_ptr_type_node,
1605 ptr_type_node, unsigned_type_node,
1606 NULL_TREE);
1609 fn = build_decl (FUNCTION_DECL, fn, args);
1610 DECL_EXTERNAL (fn) = 1;
1611 TREE_PUBLIC (fn) = 1;
1612 DECL_ARTIFICIAL (fn) = 1;
1613 TREE_NOTHROW (fn) = 1;
1615 block_move_fn = fn;
1618 if (asmspec)
1620 SET_DECL_RTL (block_move_fn, NULL_RTX);
1621 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1625 static tree
1626 emit_block_move_libcall_fn (int for_call)
1628 static bool emitted_extern;
1630 if (!block_move_fn)
1631 init_block_move_fn (NULL);
1633 if (for_call && !emitted_extern)
1635 emitted_extern = true;
1636 make_decl_rtl (block_move_fn, NULL);
1637 assemble_external (block_move_fn);
1640 return block_move_fn;
1643 /* A subroutine of emit_block_move. Copy the data via an explicit
1644 loop. This is used only when libcalls are forbidden. */
1645 /* ??? It'd be nice to copy in hunks larger than QImode. */
1647 static void
1648 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1649 unsigned int align ATTRIBUTE_UNUSED)
1651 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1652 enum machine_mode iter_mode;
1654 iter_mode = GET_MODE (size);
1655 if (iter_mode == VOIDmode)
1656 iter_mode = word_mode;
1658 top_label = gen_label_rtx ();
1659 cmp_label = gen_label_rtx ();
1660 iter = gen_reg_rtx (iter_mode);
1662 emit_move_insn (iter, const0_rtx);
1664 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1665 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1666 do_pending_stack_adjust ();
1668 emit_note (NOTE_INSN_LOOP_BEG);
1670 emit_jump (cmp_label);
1671 emit_label (top_label);
1673 tmp = convert_modes (Pmode, iter_mode, iter, true);
1674 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1675 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1676 x = change_address (x, QImode, x_addr);
1677 y = change_address (y, QImode, y_addr);
1679 emit_move_insn (x, y);
1681 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1682 true, OPTAB_LIB_WIDEN);
1683 if (tmp != iter)
1684 emit_move_insn (iter, tmp);
1686 emit_note (NOTE_INSN_LOOP_CONT);
1687 emit_label (cmp_label);
1689 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1690 true, top_label);
1692 emit_note (NOTE_INSN_LOOP_END);
1695 /* Copy all or part of a value X into registers starting at REGNO.
1696 The number of registers to be filled is NREGS. */
1698 void
1699 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1701 int i;
1702 #ifdef HAVE_load_multiple
1703 rtx pat;
1704 rtx last;
1705 #endif
1707 if (nregs == 0)
1708 return;
1710 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1711 x = validize_mem (force_const_mem (mode, x));
1713 /* See if the machine can do this with a load multiple insn. */
1714 #ifdef HAVE_load_multiple
1715 if (HAVE_load_multiple)
1717 last = get_last_insn ();
1718 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1719 GEN_INT (nregs));
1720 if (pat)
1722 emit_insn (pat);
1723 return;
1725 else
1726 delete_insns_since (last);
1728 #endif
1730 for (i = 0; i < nregs; i++)
1731 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1732 operand_subword_force (x, i, mode));
1735 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1736 The number of registers to be filled is NREGS. */
1738 void
1739 move_block_from_reg (int regno, rtx x, int nregs)
1741 int i;
1743 if (nregs == 0)
1744 return;
1746 /* See if the machine can do this with a store multiple insn. */
1747 #ifdef HAVE_store_multiple
1748 if (HAVE_store_multiple)
1750 rtx last = get_last_insn ();
1751 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1752 GEN_INT (nregs));
1753 if (pat)
1755 emit_insn (pat);
1756 return;
1758 else
1759 delete_insns_since (last);
1761 #endif
1763 for (i = 0; i < nregs; i++)
1765 rtx tem = operand_subword (x, i, 1, BLKmode);
1767 if (tem == 0)
1768 abort ();
1770 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1774 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1775 ORIG, where ORIG is a non-consecutive group of registers represented by
1776 a PARALLEL. The clone is identical to the original except in that the
1777 original set of registers is replaced by a new set of pseudo registers.
1778 The new set has the same modes as the original set. */
1780 rtx
1781 gen_group_rtx (rtx orig)
1783 int i, length;
1784 rtx *tmps;
1786 if (GET_CODE (orig) != PARALLEL)
1787 abort ();
1789 length = XVECLEN (orig, 0);
1790 tmps = alloca (sizeof (rtx) * length);
1792 /* Skip a NULL entry in first slot. */
1793 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1795 if (i)
1796 tmps[0] = 0;
1798 for (; i < length; i++)
1800 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1801 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1803 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1806 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1809 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1810 where DST is non-consecutive registers represented by a PARALLEL.
1811 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1812 if not known. */
1814 void
1815 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1817 rtx *tmps, src;
1818 int start, i;
1820 if (GET_CODE (dst) != PARALLEL)
1821 abort ();
1823 /* Check for a NULL entry, used to indicate that the parameter goes
1824 both on the stack and in registers. */
1825 if (XEXP (XVECEXP (dst, 0, 0), 0))
1826 start = 0;
1827 else
1828 start = 1;
1830 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1832 /* Process the pieces. */
1833 for (i = start; i < XVECLEN (dst, 0); i++)
1835 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1836 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1837 unsigned int bytelen = GET_MODE_SIZE (mode);
1838 int shift = 0;
1840 /* Handle trailing fragments that run over the size of the struct. */
1841 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1843 /* Arrange to shift the fragment to where it belongs.
1844 extract_bit_field loads to the lsb of the reg. */
1845 if (
1846 #ifdef BLOCK_REG_PADDING
1847 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1848 == (BYTES_BIG_ENDIAN ? upward : downward)
1849 #else
1850 BYTES_BIG_ENDIAN
1851 #endif
1853 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1854 bytelen = ssize - bytepos;
1855 if (bytelen <= 0)
1856 abort ();
1859 /* If we won't be loading directly from memory, protect the real source
1860 from strange tricks we might play; but make sure that the source can
1861 be loaded directly into the destination. */
1862 src = orig_src;
1863 if (GET_CODE (orig_src) != MEM
1864 && (!CONSTANT_P (orig_src)
1865 || (GET_MODE (orig_src) != mode
1866 && GET_MODE (orig_src) != VOIDmode)))
1868 if (GET_MODE (orig_src) == VOIDmode)
1869 src = gen_reg_rtx (mode);
1870 else
1871 src = gen_reg_rtx (GET_MODE (orig_src));
1873 emit_move_insn (src, orig_src);
1876 /* Optimize the access just a bit. */
1877 if (GET_CODE (src) == MEM
1878 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1879 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1880 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1881 && bytelen == GET_MODE_SIZE (mode))
1883 tmps[i] = gen_reg_rtx (mode);
1884 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1886 else if (GET_CODE (src) == CONCAT)
1888 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1889 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1891 if ((bytepos == 0 && bytelen == slen0)
1892 || (bytepos != 0 && bytepos + bytelen <= slen))
1894 /* The following assumes that the concatenated objects all
1895 have the same size. In this case, a simple calculation
1896 can be used to determine the object and the bit field
1897 to be extracted. */
1898 tmps[i] = XEXP (src, bytepos / slen0);
1899 if (! CONSTANT_P (tmps[i])
1900 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1901 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1902 (bytepos % slen0) * BITS_PER_UNIT,
1903 1, NULL_RTX, mode, mode, ssize);
1905 else if (bytepos == 0)
1907 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1908 emit_move_insn (mem, src);
1909 tmps[i] = adjust_address (mem, mode, 0);
1911 else
1912 abort ();
1914 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1915 SIMD register, which is currently broken. While we get GCC
1916 to emit proper RTL for these cases, let's dump to memory. */
1917 else if (VECTOR_MODE_P (GET_MODE (dst))
1918 && GET_CODE (src) == REG)
1920 int slen = GET_MODE_SIZE (GET_MODE (src));
1921 rtx mem;
1923 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1924 emit_move_insn (mem, src);
1925 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1927 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1928 && XVECLEN (dst, 0) > 1)
1929 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1930 else if (CONSTANT_P (src)
1931 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1932 tmps[i] = src;
1933 else
1934 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1935 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1936 mode, mode, ssize);
1938 if (shift)
1939 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1940 tmps[i], 0, OPTAB_WIDEN);
1943 emit_queue ();
1945 /* Copy the extracted pieces into the proper (probable) hard regs. */
1946 for (i = start; i < XVECLEN (dst, 0); i++)
1947 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1950 /* Emit code to move a block SRC to block DST, where SRC and DST are
1951 non-consecutive groups of registers, each represented by a PARALLEL. */
1953 void
1954 emit_group_move (rtx dst, rtx src)
1956 int i;
1958 if (GET_CODE (src) != PARALLEL
1959 || GET_CODE (dst) != PARALLEL
1960 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1961 abort ();
1963 /* Skip first entry if NULL. */
1964 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1965 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1966 XEXP (XVECEXP (src, 0, i), 0));
1969 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1970 where SRC is non-consecutive registers represented by a PARALLEL.
1971 SSIZE represents the total size of block ORIG_DST, or -1 if not
1972 known. */
1974 void
1975 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1977 rtx *tmps, dst;
1978 int start, i;
1980 if (GET_CODE (src) != PARALLEL)
1981 abort ();
1983 /* Check for a NULL entry, used to indicate that the parameter goes
1984 both on the stack and in registers. */
1985 if (XEXP (XVECEXP (src, 0, 0), 0))
1986 start = 0;
1987 else
1988 start = 1;
1990 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1992 /* Copy the (probable) hard regs into pseudos. */
1993 for (i = start; i < XVECLEN (src, 0); i++)
1995 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1996 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1997 emit_move_insn (tmps[i], reg);
1999 emit_queue ();
2001 /* If we won't be storing directly into memory, protect the real destination
2002 from strange tricks we might play. */
2003 dst = orig_dst;
2004 if (GET_CODE (dst) == PARALLEL)
2006 rtx temp;
2008 /* We can get a PARALLEL dst if there is a conditional expression in
2009 a return statement. In that case, the dst and src are the same,
2010 so no action is necessary. */
2011 if (rtx_equal_p (dst, src))
2012 return;
2014 /* It is unclear if we can ever reach here, but we may as well handle
2015 it. Allocate a temporary, and split this into a store/load to/from
2016 the temporary. */
2018 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2019 emit_group_store (temp, src, type, ssize);
2020 emit_group_load (dst, temp, type, ssize);
2021 return;
2023 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2025 dst = gen_reg_rtx (GET_MODE (orig_dst));
2026 /* Make life a bit easier for combine. */
2027 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2030 /* Process the pieces. */
2031 for (i = start; i < XVECLEN (src, 0); i++)
2033 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2034 enum machine_mode mode = GET_MODE (tmps[i]);
2035 unsigned int bytelen = GET_MODE_SIZE (mode);
2036 rtx dest = dst;
2038 /* Handle trailing fragments that run over the size of the struct. */
2039 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2041 /* store_bit_field always takes its value from the lsb.
2042 Move the fragment to the lsb if it's not already there. */
2043 if (
2044 #ifdef BLOCK_REG_PADDING
2045 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2046 == (BYTES_BIG_ENDIAN ? upward : downward)
2047 #else
2048 BYTES_BIG_ENDIAN
2049 #endif
2052 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2053 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2054 tmps[i], 0, OPTAB_WIDEN);
2056 bytelen = ssize - bytepos;
2059 if (GET_CODE (dst) == CONCAT)
2061 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2062 dest = XEXP (dst, 0);
2063 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2065 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2066 dest = XEXP (dst, 1);
2068 else if (bytepos == 0 && XVECLEN (src, 0))
2070 dest = assign_stack_temp (GET_MODE (dest),
2071 GET_MODE_SIZE (GET_MODE (dest)), 0);
2072 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2073 tmps[i]);
2074 dst = dest;
2075 break;
2077 else
2078 abort ();
2081 /* Optimize the access just a bit. */
2082 if (GET_CODE (dest) == MEM
2083 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2084 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2085 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2086 && bytelen == GET_MODE_SIZE (mode))
2087 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2088 else
2089 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2090 mode, tmps[i], ssize);
2093 emit_queue ();
2095 /* Copy from the pseudo into the (probable) hard reg. */
2096 if (orig_dst != dst)
2097 emit_move_insn (orig_dst, dst);
2100 /* Generate code to copy a BLKmode object of TYPE out of a
2101 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2102 is null, a stack temporary is created. TGTBLK is returned.
2104 The purpose of this routine is to handle functions that return
2105 BLKmode structures in registers. Some machines (the PA for example)
2106 want to return all small structures in registers regardless of the
2107 structure's alignment. */
2110 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2112 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2113 rtx src = NULL, dst = NULL;
2114 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2115 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2117 if (tgtblk == 0)
2119 tgtblk = assign_temp (build_qualified_type (type,
2120 (TYPE_QUALS (type)
2121 | TYPE_QUAL_CONST)),
2122 0, 1, 1);
2123 preserve_temp_slots (tgtblk);
2126 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2127 into a new pseudo which is a full word. */
2129 if (GET_MODE (srcreg) != BLKmode
2130 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2131 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2133 /* If the structure doesn't take up a whole number of words, see whether
2134 SRCREG is padded on the left or on the right. If it's on the left,
2135 set PADDING_CORRECTION to the number of bits to skip.
2137 In most ABIs, the structure will be returned at the least significant end of
2138 the register, which translates to right padding on little-endian
2139 targets and left padding on big-endian targets. The opposite
2140 holds if the structure is returned at the most significant
2141 end of the register. */
2142 if (bytes % UNITS_PER_WORD != 0
2143 && (targetm.calls.return_in_msb (type)
2144 ? !BYTES_BIG_ENDIAN
2145 : BYTES_BIG_ENDIAN))
2146 padding_correction
2147 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2149 /* Copy the structure BITSIZE bits at a time.
2151 We could probably emit more efficient code for machines which do not use
2152 strict alignment, but it doesn't seem worth the effort at the current
2153 time. */
2154 for (bitpos = 0, xbitpos = padding_correction;
2155 bitpos < bytes * BITS_PER_UNIT;
2156 bitpos += bitsize, xbitpos += bitsize)
2158 /* We need a new source operand each time xbitpos is on a
2159 word boundary and when xbitpos == padding_correction
2160 (the first time through). */
2161 if (xbitpos % BITS_PER_WORD == 0
2162 || xbitpos == padding_correction)
2163 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2164 GET_MODE (srcreg));
2166 /* We need a new destination operand each time bitpos is on
2167 a word boundary. */
2168 if (bitpos % BITS_PER_WORD == 0)
2169 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2171 /* Use xbitpos for the source extraction (right justified) and
2172 bitpos for the destination store (left justified). */
2173 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2174 extract_bit_field (src, bitsize,
2175 xbitpos % BITS_PER_WORD, 1,
2176 NULL_RTX, word_mode, word_mode,
2177 BITS_PER_WORD),
2178 BITS_PER_WORD);
2181 return tgtblk;
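/* As a worked example of the padding correction above (assuming a
   32-bit, big-endian target that returns the value at the least
   significant end of the register): for a 6-byte structure,
   bytes % UNITS_PER_WORD == 2, so padding_correction is
   32 - 2 * 8 == 16 bits, and the first 16 bits of the first source
   word are skipped as padding.  */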
2184 /* Add a USE expression for REG to the (possibly empty) list pointed
2185 to by CALL_FUSAGE. REG must denote a hard register. */
2187 void
2188 use_reg (rtx *call_fusage, rtx reg)
2190 if (GET_CODE (reg) != REG
2191 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2192 abort ();
2194 *call_fusage
2195 = gen_rtx_EXPR_LIST (VOIDmode,
2196 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2199 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2200 starting at REGNO. All of these registers must be hard registers. */
2202 void
2203 use_regs (rtx *call_fusage, int regno, int nregs)
2205 int i;
2207 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2208 abort ();
2210 for (i = 0; i < nregs; i++)
2211 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2214 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2215 PARALLEL REGS. This is for calls that pass values in multiple
2216 non-contiguous locations. The Irix 6 ABI has examples of this. */
2218 void
2219 use_group_regs (rtx *call_fusage, rtx regs)
2221 int i;
2223 for (i = 0; i < XVECLEN (regs, 0); i++)
2225 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2227 /* A NULL entry means the parameter goes both on the stack and in
2228 registers. This can also be a MEM for targets that pass values
2229 partially on the stack and partially in registers. */
2230 if (reg != 0 && GET_CODE (reg) == REG)
2231 use_reg (call_fusage, reg);
2236 /* Determine whether the LEN bytes generated by CONSTFUN can be
2237 stored to memory using several move instructions. CONSTFUNDATA is
2238 a pointer which will be passed as argument in every CONSTFUN call.
2239 ALIGN is maximum alignment we can assume. Return nonzero if a
2240 call to store_by_pieces should succeed. */
2243 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2244 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2245 void *constfundata, unsigned int align)
2247 unsigned HOST_WIDE_INT max_size, l;
2248 HOST_WIDE_INT offset = 0;
2249 enum machine_mode mode, tmode;
2250 enum insn_code icode;
2251 int reverse;
2252 rtx cst;
2254 if (len == 0)
2255 return 1;
2257 if (! STORE_BY_PIECES_P (len, align))
2258 return 0;
2260 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2261 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2262 align = MOVE_MAX * BITS_PER_UNIT;
2264 /* We would first store what we can in the largest integer mode, then go to
2265 successively smaller modes. */
2267 for (reverse = 0;
2268 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2269 reverse++)
2271 l = len;
2272 mode = VOIDmode;
2273 max_size = STORE_MAX_PIECES + 1;
2274 while (max_size > 1)
2276 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2277 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2278 if (GET_MODE_SIZE (tmode) < max_size)
2279 mode = tmode;
2281 if (mode == VOIDmode)
2282 break;
2284 icode = mov_optab->handlers[(int) mode].insn_code;
2285 if (icode != CODE_FOR_nothing
2286 && align >= GET_MODE_ALIGNMENT (mode))
2288 unsigned int size = GET_MODE_SIZE (mode);
2290 while (l >= size)
2292 if (reverse)
2293 offset -= size;
2295 cst = (*constfun) (constfundata, offset, mode);
2296 if (!LEGITIMATE_CONSTANT_P (cst))
2297 return 0;
2299 if (!reverse)
2300 offset += size;
2302 l -= size;
2306 max_size = GET_MODE_SIZE (mode);
2309 /* The code above should have handled everything. */
2310 if (l != 0)
2311 abort ();
2314 return 1;
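/* A sketch of how the forward pass of the loop above proceeds
   (assuming a 32-bit target where STORE_MAX_PIECES is 4 and SImode,
   HImode and QImode moves are all available at the given alignment):
   for LEN == 7 it queries CONSTFUN for one SImode constant at offset 0,
   one HImode constant at offset 4 and one QImode constant at offset 6;
   every constant returned must satisfy LEGITIMATE_CONSTANT_P for the
   function to return nonzero.  */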
2317 /* Generate several move instructions to store LEN bytes generated by
2318 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2319 pointer which will be passed as argument in every CONSTFUN call.
2320 ALIGN is maximum alignment we can assume.
2321 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2322 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2323 stpcpy. */
2326 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2327 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2328 void *constfundata, unsigned int align, int endp)
2330 struct store_by_pieces data;
2332 if (len == 0)
2334 if (endp == 2)
2335 abort ();
2336 return to;
2339 if (! STORE_BY_PIECES_P (len, align))
2340 abort ();
2341 to = protect_from_queue (to, 1);
2342 data.constfun = constfun;
2343 data.constfundata = constfundata;
2344 data.len = len;
2345 data.to = to;
2346 store_by_pieces_1 (&data, align);
2347 if (endp)
2349 rtx to1;
2351 if (data.reverse)
2352 abort ();
2353 if (data.autinc_to)
2355 if (endp == 2)
2357 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2358 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2359 else
2360 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2361 -1));
2363 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2364 data.offset);
2366 else
2368 if (endp == 2)
2369 --data.offset;
2370 to1 = adjust_address (data.to, QImode, data.offset);
2372 return to1;
2374 else
2375 return data.to;
2378 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2379 rtx with BLKmode). The caller must pass TO through protect_from_queue
2380 before calling. ALIGN is maximum alignment we can assume. */
2382 static void
2383 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2385 struct store_by_pieces data;
2387 if (len == 0)
2388 return;
2390 data.constfun = clear_by_pieces_1;
2391 data.constfundata = NULL;
2392 data.len = len;
2393 data.to = to;
2394 store_by_pieces_1 (&data, align);
2397 /* Callback routine for clear_by_pieces.
2398 Return const0_rtx unconditionally. */
2400 static rtx
2401 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2402 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2403 enum machine_mode mode ATTRIBUTE_UNUSED)
2405 return const0_rtx;
2408 /* Subroutine of clear_by_pieces and store_by_pieces.
2409 Generate several move instructions to store LEN bytes of block TO. (A MEM
2410 rtx with BLKmode). The caller must pass TO through protect_from_queue
2411 before calling. ALIGN is maximum alignment we can assume. */
2413 static void
2414 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2415 unsigned int align ATTRIBUTE_UNUSED)
2417 rtx to_addr = XEXP (data->to, 0);
2418 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2419 enum machine_mode mode = VOIDmode, tmode;
2420 enum insn_code icode;
2422 data->offset = 0;
2423 data->to_addr = to_addr;
2424 data->autinc_to
2425 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2426 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2428 data->explicit_inc_to = 0;
2429 data->reverse
2430 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2431 if (data->reverse)
2432 data->offset = data->len;
2434 /* If storing requires more than two move insns,
2435 copy addresses to registers (to make displacements shorter)
2436 and use post-increment if available. */
2437 if (!data->autinc_to
2438 && move_by_pieces_ninsns (data->len, align) > 2)
2440 /* Determine the main mode we'll be using. */
2441 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2442 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2443 if (GET_MODE_SIZE (tmode) < max_size)
2444 mode = tmode;
2446 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2448 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2449 data->autinc_to = 1;
2450 data->explicit_inc_to = -1;
2453 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2454 && ! data->autinc_to)
2456 data->to_addr = copy_addr_to_reg (to_addr);
2457 data->autinc_to = 1;
2458 data->explicit_inc_to = 1;
2461 if ( !data->autinc_to && CONSTANT_P (to_addr))
2462 data->to_addr = copy_addr_to_reg (to_addr);
2465 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2466 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2467 align = MOVE_MAX * BITS_PER_UNIT;
2469 /* First store what we can in the largest integer mode, then go to
2470 successively smaller modes. */
2472 while (max_size > 1)
2474 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2475 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2476 if (GET_MODE_SIZE (tmode) < max_size)
2477 mode = tmode;
2479 if (mode == VOIDmode)
2480 break;
2482 icode = mov_optab->handlers[(int) mode].insn_code;
2483 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2484 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2486 max_size = GET_MODE_SIZE (mode);
2489 /* The code above should have handled everything. */
2490 if (data->len != 0)
2491 abort ();
2494 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2495 with move instructions for mode MODE. GENFUN is the gen_... function
2496 to make a move insn for that mode. DATA has all the other info. */
2498 static void
2499 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2500 struct store_by_pieces *data)
2502 unsigned int size = GET_MODE_SIZE (mode);
2503 rtx to1, cst;
2505 while (data->len >= size)
2507 if (data->reverse)
2508 data->offset -= size;
2510 if (data->autinc_to)
2511 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2512 data->offset);
2513 else
2514 to1 = adjust_address (data->to, mode, data->offset);
2516 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2517 emit_insn (gen_add2_insn (data->to_addr,
2518 GEN_INT (-(HOST_WIDE_INT) size)));
2520 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2521 emit_insn ((*genfun) (to1, cst));
2523 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2524 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2526 if (! data->reverse)
2527 data->offset += size;
2529 data->len -= size;
2533 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2534 its length in bytes. */
2537 clear_storage (rtx object, rtx size)
2539 rtx retval = 0;
2540 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2541 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2543 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2544 just move a zero. Otherwise, do this a piece at a time. */
2545 if (GET_MODE (object) != BLKmode
2546 && GET_CODE (size) == CONST_INT
2547 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2548 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2549 else
2551 object = protect_from_queue (object, 1);
2552 size = protect_from_queue (size, 0);
2554 if (size == const0_rtx)
2556 else if (GET_CODE (size) == CONST_INT
2557 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2558 clear_by_pieces (object, INTVAL (size), align);
2559 else if (clear_storage_via_clrstr (object, size, align))
2561 else
2562 retval = clear_storage_via_libcall (object, size);
2565 return retval;
2568 /* A subroutine of clear_storage. Expand a clrstr pattern;
2569 return true if successful. */
2571 static bool
2572 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2574 /* Try the most limited insn first, because there's no point
2575 including more than one in the machine description unless
2576 the more limited one has some advantage. */
2578 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2579 enum machine_mode mode;
2581 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2582 mode = GET_MODE_WIDER_MODE (mode))
2584 enum insn_code code = clrstr_optab[(int) mode];
2585 insn_operand_predicate_fn pred;
2587 if (code != CODE_FOR_nothing
2588 /* We don't need MODE to be narrower than
2589 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2590 the mode mask, as it is returned by the macro, it will
2591 definitely be less than the actual mode mask. */
2592 && ((GET_CODE (size) == CONST_INT
2593 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2594 <= (GET_MODE_MASK (mode) >> 1)))
2595 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2596 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2597 || (*pred) (object, BLKmode))
2598 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2599 || (*pred) (opalign, VOIDmode)))
2601 rtx op1;
2602 rtx last = get_last_insn ();
2603 rtx pat;
2605 op1 = convert_to_mode (mode, size, 1);
2606 pred = insn_data[(int) code].operand[1].predicate;
2607 if (pred != 0 && ! (*pred) (op1, mode))
2608 op1 = copy_to_mode_reg (mode, op1);
2610 pat = GEN_FCN ((int) code) (object, op1, opalign);
2611 if (pat)
2613 emit_insn (pat);
2614 return true;
2616 else
2617 delete_insns_since (last);
2621 return false;
2624 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2625 Return the return value of memset, 0 otherwise. */
2627 static rtx
2628 clear_storage_via_libcall (rtx object, rtx size)
2630 tree call_expr, arg_list, fn, object_tree, size_tree;
2631 enum machine_mode size_mode;
2632 rtx retval;
2634 /* OBJECT or SIZE may have been passed through protect_from_queue.
2636 It is unsafe to save the value generated by protect_from_queue
2637 and reuse it later. Consider what happens if emit_queue is
2638 called before the return value from protect_from_queue is used.
2640 Expansion of the CALL_EXPR below will call emit_queue before
2641 we are finished emitting RTL for argument setup. So if we are
2642 not careful we could get the wrong value for an argument.
2644 To avoid this problem we go ahead and emit code to copy OBJECT
2645 and SIZE into new pseudos. We can then place those new pseudos
2646 into an RTL_EXPR and use them later, even after a call to
2647 emit_queue.
2649 Note this is not strictly needed for library calls since they
2650 do not call emit_queue before loading their arguments. However,
2651 we may need to have library calls call emit_queue in the future
2652 since failing to do so could cause problems for targets which
2653 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2655 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2657 if (TARGET_MEM_FUNCTIONS)
2658 size_mode = TYPE_MODE (sizetype);
2659 else
2660 size_mode = TYPE_MODE (unsigned_type_node);
2661 size = convert_to_mode (size_mode, size, 1);
2662 size = copy_to_mode_reg (size_mode, size);
2664 /* It is incorrect to use the libcall calling conventions to call
2665 memset in this context. This could be a user call to memset and
2666 the user may wish to examine the return value from memset. For
2667 targets where libcalls and normal calls have different conventions
2668 for returning pointers, we could end up generating incorrect code.
2670 For convenience, we generate the call to bzero this way as well. */
2672 object_tree = make_tree (ptr_type_node, object);
2673 if (TARGET_MEM_FUNCTIONS)
2674 size_tree = make_tree (sizetype, size);
2675 else
2676 size_tree = make_tree (unsigned_type_node, size);
2678 fn = clear_storage_libcall_fn (true);
2679 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2680 if (TARGET_MEM_FUNCTIONS)
2681 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2682 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2684 /* Now we have to build up the CALL_EXPR itself. */
2685 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2686 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2687 call_expr, arg_list, NULL_TREE);
2689 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2691 /* If we are initializing a readonly value, show the above call
2692 clobbered it. Otherwise, a load from it may erroneously be
2693 hoisted from a loop. */
2694 if (RTX_UNCHANGING_P (object))
2695 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2697 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
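/* The call built above is, in effect, memset (OBJECT, 0, SIZE) when
   TARGET_MEM_FUNCTIONS is set and bzero (OBJECT, SIZE) otherwise; only
   in the memset case is the call's return value propagated back to the
   caller of clear_storage.  */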
2700 /* A subroutine of clear_storage_via_libcall. Create the tree node
2701 for the function we use for block clears. The first time FOR_CALL
2702 is true, we call assemble_external. */
2704 static GTY(()) tree block_clear_fn;
2706 void
2707 init_block_clear_fn (const char *asmspec)
2709 if (!block_clear_fn)
2711 tree fn, args;
2713 if (TARGET_MEM_FUNCTIONS)
2715 fn = get_identifier ("memset");
2716 args = build_function_type_list (ptr_type_node, ptr_type_node,
2717 integer_type_node, sizetype,
2718 NULL_TREE);
2720 else
2722 fn = get_identifier ("bzero");
2723 args = build_function_type_list (void_type_node, ptr_type_node,
2724 unsigned_type_node, NULL_TREE);
2727 fn = build_decl (FUNCTION_DECL, fn, args);
2728 DECL_EXTERNAL (fn) = 1;
2729 TREE_PUBLIC (fn) = 1;
2730 DECL_ARTIFICIAL (fn) = 1;
2731 TREE_NOTHROW (fn) = 1;
2733 block_clear_fn = fn;
2736 if (asmspec)
2738 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2739 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2743 static tree
2744 clear_storage_libcall_fn (int for_call)
2746 static bool emitted_extern;
2748 if (!block_clear_fn)
2749 init_block_clear_fn (NULL);
2751 if (for_call && !emitted_extern)
2753 emitted_extern = true;
2754 make_decl_rtl (block_clear_fn, NULL);
2755 assemble_external (block_clear_fn);
2758 return block_clear_fn;
2761 /* Generate code to copy Y into X.
2762 Both Y and X must have the same mode, except that
2763 Y can be a constant with VOIDmode.
2764 This mode cannot be BLKmode; use emit_block_move for that.
2766 Return the last instruction emitted. */
2769 emit_move_insn (rtx x, rtx y)
2771 enum machine_mode mode = GET_MODE (x);
2772 rtx y_cst = NULL_RTX;
2773 rtx last_insn, set;
2775 x = protect_from_queue (x, 1);
2776 y = protect_from_queue (y, 0);
2778 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2779 abort ();
2781 /* Never force constant_p_rtx to memory. */
2782 if (GET_CODE (y) == CONSTANT_P_RTX)
2784 else if (CONSTANT_P (y))
2786 if (optimize
2787 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2788 && (last_insn = compress_float_constant (x, y)))
2789 return last_insn;
2791 y_cst = y;
2793 if (!LEGITIMATE_CONSTANT_P (y))
2795 y = force_const_mem (mode, y);
2797 /* If the target's cannot_force_const_mem prevented the spill,
2798 assume that the target's move expanders will also take care
2799 of the non-legitimate constant. */
2800 if (!y)
2801 y = y_cst;
2805 /* If X or Y are memory references, verify that their addresses are valid
2806 for the machine. */
2807 if (GET_CODE (x) == MEM
2808 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2809 && ! push_operand (x, GET_MODE (x)))
2810 || (flag_force_addr
2811 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2812 x = validize_mem (x);
2814 if (GET_CODE (y) == MEM
2815 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2816 || (flag_force_addr
2817 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2818 y = validize_mem (y);
2820 if (mode == BLKmode)
2821 abort ();
2823 last_insn = emit_move_insn_1 (x, y);
2825 if (y_cst && GET_CODE (x) == REG
2826 && (set = single_set (last_insn)) != NULL_RTX
2827 && SET_DEST (set) == x
2828 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2829 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2831 return last_insn;
2834 /* Low level part of emit_move_insn.
2835 Called just like emit_move_insn, but assumes X and Y
2836 are basically valid. */
2839 emit_move_insn_1 (rtx x, rtx y)
2841 enum machine_mode mode = GET_MODE (x);
2842 enum machine_mode submode;
2843 enum mode_class class = GET_MODE_CLASS (mode);
2845 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2846 abort ();
2848 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2849 return
2850 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2852 /* Expand complex moves by moving real part and imag part, if possible. */
2853 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2854 && BLKmode != (submode = GET_MODE_INNER (mode))
2855 && (mov_optab->handlers[(int) submode].insn_code
2856 != CODE_FOR_nothing))
2858 /* Don't split destination if it is a stack push. */
2859 int stack = push_operand (x, GET_MODE (x));
2861 #ifdef PUSH_ROUNDING
2862 /* In case we output to the stack, but the size is smaller than the
2863 machine can push exactly, we need to use move instructions. */
2864 if (stack
2865 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2866 != GET_MODE_SIZE (submode)))
2868 rtx temp;
2869 HOST_WIDE_INT offset1, offset2;
2871 /* Do not use anti_adjust_stack, since we don't want to update
2872 stack_pointer_delta. */
2873 temp = expand_binop (Pmode,
2874 #ifdef STACK_GROWS_DOWNWARD
2875 sub_optab,
2876 #else
2877 add_optab,
2878 #endif
2879 stack_pointer_rtx,
2880 GEN_INT
2881 (PUSH_ROUNDING
2882 (GET_MODE_SIZE (GET_MODE (x)))),
2883 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2885 if (temp != stack_pointer_rtx)
2886 emit_move_insn (stack_pointer_rtx, temp);
2888 #ifdef STACK_GROWS_DOWNWARD
2889 offset1 = 0;
2890 offset2 = GET_MODE_SIZE (submode);
2891 #else
2892 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2893 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2894 + GET_MODE_SIZE (submode));
2895 #endif
2897 emit_move_insn (change_address (x, submode,
2898 gen_rtx_PLUS (Pmode,
2899 stack_pointer_rtx,
2900 GEN_INT (offset1))),
2901 gen_realpart (submode, y));
2902 emit_move_insn (change_address (x, submode,
2903 gen_rtx_PLUS (Pmode,
2904 stack_pointer_rtx,
2905 GEN_INT (offset2))),
2906 gen_imagpart (submode, y));
2908 else
2909 #endif
2910 /* If this is a stack, push the highpart first, so it
2911 will be in the argument order.
2913 In that case, change_address is used only to convert
2914 the mode, not to change the address. */
2915 if (stack)
2917 /* Note that the real part always precedes the imag part in memory
2918 regardless of machine's endianness. */
2919 #ifdef STACK_GROWS_DOWNWARD
2920 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2921 gen_imagpart (submode, y));
2922 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2923 gen_realpart (submode, y));
2924 #else
2925 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2926 gen_realpart (submode, y));
2927 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2928 gen_imagpart (submode, y));
2929 #endif
2931 else
2933 rtx realpart_x, realpart_y;
2934 rtx imagpart_x, imagpart_y;
2936 /* If this is a complex value with each part being smaller than a
2937 word, the usual calling sequence will likely pack the pieces into
2938 a single register. Unfortunately, SUBREG of hard registers only
2939 deals in terms of words, so we have a problem converting input
2940 arguments to the CONCAT of two registers that is used elsewhere
2941 for complex values. If this is before reload, we can copy it into
2942 memory and reload. FIXME, we should see about using extract and
2943 insert on integer registers, but complex short and complex char
2944 variables should be rarely used. */
2945 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2946 && (reload_in_progress | reload_completed) == 0)
2948 int packed_dest_p
2949 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2950 int packed_src_p
2951 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2953 if (packed_dest_p || packed_src_p)
2955 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2956 ? MODE_FLOAT : MODE_INT);
2958 enum machine_mode reg_mode
2959 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2961 if (reg_mode != BLKmode)
2963 rtx mem = assign_stack_temp (reg_mode,
2964 GET_MODE_SIZE (mode), 0);
2965 rtx cmem = adjust_address (mem, mode, 0);
2967 cfun->cannot_inline
2968 = N_("function using short complex types cannot be inline");
2970 if (packed_dest_p)
2972 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2974 emit_move_insn_1 (cmem, y);
2975 return emit_move_insn_1 (sreg, mem);
2977 else
2979 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2981 emit_move_insn_1 (mem, sreg);
2982 return emit_move_insn_1 (x, cmem);
2988 realpart_x = gen_realpart (submode, x);
2989 realpart_y = gen_realpart (submode, y);
2990 imagpart_x = gen_imagpart (submode, x);
2991 imagpart_y = gen_imagpart (submode, y);
2993 /* Show the output dies here. This is necessary for SUBREGs
2994 of pseudos since we cannot track their lifetimes correctly;
2995 hard regs shouldn't appear here except as return values.
2996 We never want to emit such a clobber after reload. */
2997 if (x != y
2998 && ! (reload_in_progress || reload_completed)
2999 && (GET_CODE (realpart_x) == SUBREG
3000 || GET_CODE (imagpart_x) == SUBREG))
3001 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3003 emit_move_insn (realpart_x, realpart_y);
3004 emit_move_insn (imagpart_x, imagpart_y);
3007 return get_last_insn ();
3010 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3011 find a mode to do it in. If we have a movcc, use it. Otherwise,
3012 find the MODE_INT mode of the same width. */
3013 else if (GET_MODE_CLASS (mode) == MODE_CC
3014 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3016 enum insn_code insn_code;
3017 enum machine_mode tmode = VOIDmode;
3018 rtx x1 = x, y1 = y;
3020 if (mode != CCmode
3021 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3022 tmode = CCmode;
3023 else
3024 for (tmode = QImode; tmode != VOIDmode;
3025 tmode = GET_MODE_WIDER_MODE (tmode))
3026 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3027 break;
3029 if (tmode == VOIDmode)
3030 abort ();
3032 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3033 may call change_address which is not appropriate if we were
3034 called when a reload was in progress. We don't have to worry
3035 about changing the address since the size in bytes is supposed to
3036 be the same. Copy the MEM to change the mode and move any
3037 substitutions from the old MEM to the new one. */
3039 if (reload_in_progress)
3041 x = gen_lowpart_common (tmode, x1);
3042 if (x == 0 && GET_CODE (x1) == MEM)
3044 x = adjust_address_nv (x1, tmode, 0);
3045 copy_replacements (x1, x);
3048 y = gen_lowpart_common (tmode, y1);
3049 if (y == 0 && GET_CODE (y1) == MEM)
3051 y = adjust_address_nv (y1, tmode, 0);
3052 copy_replacements (y1, y);
3055 else
3057 x = gen_lowpart (tmode, x);
3058 y = gen_lowpart (tmode, y);
3061 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3062 return emit_insn (GEN_FCN (insn_code) (x, y));
3065 /* Try using a move pattern for the corresponding integer mode. This is
3066 only safe when simplify_subreg can convert MODE constants into integer
3067 constants. At present, it can only do this reliably if the value
3068 fits within a HOST_WIDE_INT. */
3069 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3070 && (submode = int_mode_for_mode (mode)) != BLKmode
3071 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3072 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3073 (simplify_gen_subreg (submode, x, mode, 0),
3074 simplify_gen_subreg (submode, y, mode, 0)));
3076 /* This will handle any multi-word or full-word mode that lacks a move_insn
3077 pattern. However, you will get better code if you define such patterns,
3078 even if they must turn into multiple assembler instructions. */
3079 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3081 rtx last_insn = 0;
3082 rtx seq, inner;
3083 int need_clobber;
3084 int i;
3086 #ifdef PUSH_ROUNDING
3088 /* If X is a push on the stack, do the push now and replace
3089 X with a reference to the stack pointer. */
3090 if (push_operand (x, GET_MODE (x)))
3092 rtx temp;
3093 enum rtx_code code;
3095 /* Do not use anti_adjust_stack, since we don't want to update
3096 stack_pointer_delta. */
3097 temp = expand_binop (Pmode,
3098 #ifdef STACK_GROWS_DOWNWARD
3099 sub_optab,
3100 #else
3101 add_optab,
3102 #endif
3103 stack_pointer_rtx,
3104 GEN_INT
3105 (PUSH_ROUNDING
3106 (GET_MODE_SIZE (GET_MODE (x)))),
3107 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3109 if (temp != stack_pointer_rtx)
3110 emit_move_insn (stack_pointer_rtx, temp);
3112 code = GET_CODE (XEXP (x, 0));
3114 /* Just hope that small offsets off SP are OK. */
3115 if (code == POST_INC)
3116 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3117 GEN_INT (-((HOST_WIDE_INT)
3118 GET_MODE_SIZE (GET_MODE (x)))));
3119 else if (code == POST_DEC)
3120 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3121 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3122 else
3123 temp = stack_pointer_rtx;
3125 x = change_address (x, VOIDmode, temp);
3127 #endif
3129 /* If we are in reload, see if either operand is a MEM whose address
3130 is scheduled for replacement. */
3131 if (reload_in_progress && GET_CODE (x) == MEM
3132 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3133 x = replace_equiv_address_nv (x, inner);
3134 if (reload_in_progress && GET_CODE (y) == MEM
3135 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3136 y = replace_equiv_address_nv (y, inner);
3138 start_sequence ();
3140 need_clobber = 0;
3141 for (i = 0;
3142 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3143 i++)
3145 rtx xpart = operand_subword (x, i, 1, mode);
3146 rtx ypart = operand_subword (y, i, 1, mode);
3148 /* If we can't get a part of Y, put Y into memory if it is a
3149 constant. Otherwise, force it into a register. If we still
3150 can't get a part of Y, abort. */
3151 if (ypart == 0 && CONSTANT_P (y))
3153 y = force_const_mem (mode, y);
3154 ypart = operand_subword (y, i, 1, mode);
3156 else if (ypart == 0)
3157 ypart = operand_subword_force (y, i, mode);
3159 if (xpart == 0 || ypart == 0)
3160 abort ();
3162 need_clobber |= (GET_CODE (xpart) == SUBREG);
3164 last_insn = emit_move_insn (xpart, ypart);
3167 seq = get_insns ();
3168 end_sequence ();
3170 /* Show the output dies here. This is necessary for SUBREGs
3171 of pseudos since we cannot track their lifetimes correctly;
3172 hard regs shouldn't appear here except as return values.
3173 We never want to emit such a clobber after reload. */
3174 if (x != y
3175 && ! (reload_in_progress || reload_completed)
3176 && need_clobber != 0)
3177 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3179 emit_insn (seq);
3181 return last_insn;
3183 else
3184 abort ();
3187 /* If Y is representable exactly in a narrower mode, and the target can
3188 perform the extension directly from constant or memory, then emit the
3189 move as an extension. */
3191 static rtx
3192 compress_float_constant (rtx x, rtx y)
3194 enum machine_mode dstmode = GET_MODE (x);
3195 enum machine_mode orig_srcmode = GET_MODE (y);
3196 enum machine_mode srcmode;
3197 REAL_VALUE_TYPE r;
3199 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3201 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3202 srcmode != orig_srcmode;
3203 srcmode = GET_MODE_WIDER_MODE (srcmode))
3205 enum insn_code ic;
3206 rtx trunc_y, last_insn;
3208 /* Skip if the target can't extend this way. */
3209 ic = can_extend_p (dstmode, srcmode, 0);
3210 if (ic == CODE_FOR_nothing)
3211 continue;
3213 /* Skip if the narrowed value isn't exact. */
3214 if (! exact_real_truncate (srcmode, &r))
3215 continue;
3217 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3219 if (LEGITIMATE_CONSTANT_P (trunc_y))
3221 /* Skip if the target needs extra instructions to perform
3222 the extension. */
3223 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3224 continue;
3226 else if (float_extend_from_mem[dstmode][srcmode])
3227 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3228 else
3229 continue;
3231 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3232 last_insn = get_last_insn ();
3234 if (GET_CODE (x) == REG)
3235 set_unique_reg_note (last_insn, REG_EQUAL, y);
3237 return last_insn;
3240 return NULL_RTX;
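/* As an example of the transformation above (assuming the target
   provides an extendsfdf2 pattern whose operand predicate accepts the
   chosen operand): moving the DFmode constant 1.0 can be emitted as a
   float_extend of the SFmode constant 1.0, because truncating 1.0 to
   SFmode is exact; a constant such as 0.1 is left alone, since its
   truncation to SFmode is inexact.  */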
3243 /* Pushing data onto the stack. */
3245 /* Push a block of length SIZE (perhaps variable)
3246 and return an rtx to address the beginning of the block.
3247 Note that it is not possible for the value returned to be a QUEUED.
3248 The value may be virtual_outgoing_args_rtx.
3250 EXTRA is the number of bytes of padding to push in addition to SIZE.
3251 BELOW nonzero means this padding comes at low addresses;
3252 otherwise, the padding comes at high addresses. */
3255 push_block (rtx size, int extra, int below)
3257 rtx temp;
3259 size = convert_modes (Pmode, ptr_mode, size, 1);
3260 if (CONSTANT_P (size))
3261 anti_adjust_stack (plus_constant (size, extra));
3262 else if (GET_CODE (size) == REG && extra == 0)
3263 anti_adjust_stack (size);
3264 else
3266 temp = copy_to_mode_reg (Pmode, size);
3267 if (extra != 0)
3268 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3269 temp, 0, OPTAB_LIB_WIDEN);
3270 anti_adjust_stack (temp);
3273 #ifndef STACK_GROWS_DOWNWARD
3274 if (0)
3275 #else
3276 if (1)
3277 #endif
3279 temp = virtual_outgoing_args_rtx;
3280 if (extra != 0 && below)
3281 temp = plus_constant (temp, extra);
3283 else
3285 if (GET_CODE (size) == CONST_INT)
3286 temp = plus_constant (virtual_outgoing_args_rtx,
3287 -INTVAL (size) - (below ? 0 : extra));
3288 else if (extra != 0 && !below)
3289 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3290 negate_rtx (Pmode, plus_constant (size, extra)));
3291 else
3292 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3293 negate_rtx (Pmode, size));
3296 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3299 #ifdef PUSH_ROUNDING
3301 /* Emit single push insn. */
3303 static void
3304 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3306 rtx dest_addr;
3307 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3308 rtx dest;
3309 enum insn_code icode;
3310 insn_operand_predicate_fn pred;
3312 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3313 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3314 a MEM representing the push operation to the move expander.
3315 icode = push_optab->handlers[(int) mode].insn_code;
3316 if (icode != CODE_FOR_nothing)
3318 if (((pred = insn_data[(int) icode].operand[0].predicate)
3319 && !((*pred) (x, mode))))
3320 x = force_reg (mode, x);
3321 emit_insn (GEN_FCN (icode) (x));
3322 return;
3324 if (GET_MODE_SIZE (mode) == rounded_size)
3325 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3326 /* If we are to pad downward, adjust the stack pointer first and
3327 then store X into the stack location using an offset. This is
3328 because emit_move_insn does not know how to pad; it does not have
3329 access to type. */
3330 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3332 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3333 HOST_WIDE_INT offset;
3335 emit_move_insn (stack_pointer_rtx,
3336 expand_binop (Pmode,
3337 #ifdef STACK_GROWS_DOWNWARD
3338 sub_optab,
3339 #else
3340 add_optab,
3341 #endif
3342 stack_pointer_rtx,
3343 GEN_INT (rounded_size),
3344 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3346 offset = (HOST_WIDE_INT) padding_size;
3347 #ifdef STACK_GROWS_DOWNWARD
3348 if (STACK_PUSH_CODE == POST_DEC)
3349 /* We have already decremented the stack pointer, so get the
3350 previous value. */
3351 offset += (HOST_WIDE_INT) rounded_size;
3352 #else
3353 if (STACK_PUSH_CODE == POST_INC)
3354 /* We have already incremented the stack pointer, so get the
3355 previous value. */
3356 offset -= (HOST_WIDE_INT) rounded_size;
3357 #endif
3358 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3360 else
3362 #ifdef STACK_GROWS_DOWNWARD
3363 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3364 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3365 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3366 #else
3367 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3368 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3369 GEN_INT (rounded_size));
3370 #endif
3371 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3374 dest = gen_rtx_MEM (mode, dest_addr);
3376 if (type != 0)
3378 set_mem_attributes (dest, type, 1);
3380 if (flag_optimize_sibling_calls)
3381 /* Function incoming arguments may overlap with sibling call
3382 outgoing arguments and we cannot allow reordering of reads
3383 from function arguments with stores to outgoing arguments
3384 of sibling calls. */
3385 set_mem_alias_set (dest, 0);
3387 emit_move_insn (dest, x);
3389 #endif
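/* As an illustration of the simplest case above: on a
   STACK_GROWS_DOWNWARD target where STACK_PUSH_CODE is PRE_DEC and
   GET_MODE_SIZE (mode) equals its PUSH_ROUNDING, the destination is
   just

	(mem:MODE (pre_dec (reg stack_pointer)))

   and the final emit_move_insn is expected to become a single push
   instruction on targets that provide one.  */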
3391 /* Generate code to push X onto the stack, assuming it has mode MODE and
3392 type TYPE.
3393 MODE is redundant except when X is a CONST_INT (since they don't
3394 carry mode info).
3395 SIZE is an rtx for the size of data to be copied (in bytes),
3396 needed only if X is BLKmode.
3398 ALIGN (in bits) is maximum alignment we can assume.
3400 If PARTIAL and REG are both nonzero, then copy that many of the first
3401 words of X into registers starting with REG, and push the rest of X.
3402 The amount of space pushed is decreased by PARTIAL words,
3403 rounded *down* to a multiple of PARM_BOUNDARY.
3404 REG must be a hard register in this case.
3405 If REG is zero but PARTIAL is not, take all other actions for an
3406 argument partially in registers, but do not actually load any
3407 registers.
3409 EXTRA is the amount in bytes of extra space to leave next to this arg.
3410 This is ignored if an argument block has already been allocated.
3412 On a machine that lacks real push insns, ARGS_ADDR is the address of
3413 the bottom of the argument block for this call. We use indexing off there
3414 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3415 argument block has not been preallocated.
3417 ARGS_SO_FAR is the size of args previously pushed for this call.
3419 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3420 for arguments passed in registers. If nonzero, it will be the number
3421 of bytes required. */
3423 void
3424 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3425 unsigned int align, int partial, rtx reg, int extra,
3426 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3427 rtx alignment_pad)
3429 rtx xinner;
3430 enum direction stack_direction
3431 #ifdef STACK_GROWS_DOWNWARD
3432 = downward;
3433 #else
3434 = upward;
3435 #endif
3437 /* Decide where to pad the argument: `downward' for below,
3438 `upward' for above, or `none' for don't pad it.
3439 Default is below for small data on big-endian machines; else above. */
3440 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3442 /* Invert direction if stack is post-decrement.
3443 FIXME: why? */
3444 if (STACK_PUSH_CODE == POST_DEC)
3445 if (where_pad != none)
3446 where_pad = (where_pad == downward ? upward : downward);
3448 xinner = x = protect_from_queue (x, 0);
3450 if (mode == BLKmode)
3452 /* Copy a block into the stack, entirely or partially. */
3454 rtx temp;
3455 int used = partial * UNITS_PER_WORD;
3456 int offset;
3457 int skip;
3459 if (reg && GET_CODE (reg) == PARALLEL)
3461 /* Use the size of the elt to compute offset. */
3462 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3463 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3464 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3466 else
3467 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3469 if (size == 0)
3470 abort ();
3472 used -= offset;
3474 /* USED is now the # of bytes we need not copy to the stack
3475 because registers will take care of them. */
3477 if (partial != 0)
3478 xinner = adjust_address (xinner, BLKmode, used);
3480 /* If the partial register-part of the arg counts in its stack size,
3481 skip the part of stack space corresponding to the registers.
3482 Otherwise, start copying to the beginning of the stack space,
3483 by setting SKIP to 0. */
3484 skip = (reg_parm_stack_space == 0) ? 0 : used;
3486 #ifdef PUSH_ROUNDING
3487 /* Do it with several push insns if that doesn't take lots of insns
3488 and if there is no difficulty with push insns that skip bytes
3489 on the stack for alignment purposes. */
3490 if (args_addr == 0
3491 && PUSH_ARGS
3492 && GET_CODE (size) == CONST_INT
3493 && skip == 0
3494 && MEM_ALIGN (xinner) >= align
3495 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3496 /* Here we avoid the case of a structure whose weak alignment
3497 forces many pushes of a small amount of data,
3498 and such small pushes do rounding that causes trouble. */
3499 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3500 || align >= BIGGEST_ALIGNMENT
3501 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3502 == (align / BITS_PER_UNIT)))
3503 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3505 /* Push padding now if padding above and stack grows down,
3506 or if padding below and stack grows up.
3507 But if space already allocated, this has already been done. */
3508 if (extra && args_addr == 0
3509 && where_pad != none && where_pad != stack_direction)
3510 anti_adjust_stack (GEN_INT (extra));
3512 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3514 else
3515 #endif /* PUSH_ROUNDING */
3517 rtx target;
3519 /* Otherwise make space on the stack and copy the data
3520 to the address of that space. */
3522 /* Deduct words put into registers from the size we must copy. */
3523 if (partial != 0)
3525 if (GET_CODE (size) == CONST_INT)
3526 size = GEN_INT (INTVAL (size) - used);
3527 else
3528 size = expand_binop (GET_MODE (size), sub_optab, size,
3529 GEN_INT (used), NULL_RTX, 0,
3530 OPTAB_LIB_WIDEN);
3533 /* Get the address of the stack space.
3534 In this case, we do not deal with EXTRA separately.
3535 A single stack adjust will do. */
3536 if (! args_addr)
3538 temp = push_block (size, extra, where_pad == downward);
3539 extra = 0;
3541 else if (GET_CODE (args_so_far) == CONST_INT)
3542 temp = memory_address (BLKmode,
3543 plus_constant (args_addr,
3544 skip + INTVAL (args_so_far)));
3545 else
3546 temp = memory_address (BLKmode,
3547 plus_constant (gen_rtx_PLUS (Pmode,
3548 args_addr,
3549 args_so_far),
3550 skip));
3552 if (!ACCUMULATE_OUTGOING_ARGS)
3554 /* If the source is referenced relative to the stack pointer,
3555 copy it to another register to stabilize it. We do not need
3556 to do this if we know that we won't be changing sp. */
3558 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3559 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3560 temp = copy_to_reg (temp);
3563 target = gen_rtx_MEM (BLKmode, temp);
3565 if (type != 0)
3567 set_mem_attributes (target, type, 1);
3568 /* Function incoming arguments may overlap with sibling call
3569 outgoing arguments and we cannot allow reordering of reads
3570 from function arguments with stores to outgoing arguments
3571 of sibling calls. */
3572 set_mem_alias_set (target, 0);
3575 /* ALIGN may well be better aligned than TYPE, e.g. due to
3576 PARM_BOUNDARY. Assume the caller isn't lying. */
3577 set_mem_align (target, align);
3579 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3582 else if (partial > 0)
3584 /* Scalar partly in registers. */
3586 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3587 int i;
3588 int not_stack;
3589 /* # words of start of argument
3590 that we must make space for but need not store. */
3591 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3592 int args_offset = INTVAL (args_so_far);
3593 int skip;
3595 /* Push padding now if padding above and stack grows down,
3596 or if padding below and stack grows up.
3597 But if space already allocated, this has already been done. */
3598 if (extra && args_addr == 0
3599 && where_pad != none && where_pad != stack_direction)
3600 anti_adjust_stack (GEN_INT (extra));
3602 /* If we make space by pushing it, we might as well push
3603 the real data. Otherwise, we can leave OFFSET nonzero
3604 and leave the space uninitialized. */
3605 if (args_addr == 0)
3606 offset = 0;
3608 /* Now NOT_STACK gets the number of words that we don't need to
3609 allocate on the stack. */
3610 not_stack = partial - offset;
3612 /* If the partial register-part of the arg counts in its stack size,
3613 skip the part of stack space corresponding to the registers.
3614 Otherwise, start copying to the beginning of the stack space,
3615 by setting SKIP to 0. */
3616 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3618 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3619 x = validize_mem (force_const_mem (mode, x));
3621 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3622 SUBREGs of such registers are not allowed. */
3623 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3624 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3625 x = copy_to_reg (x);
3627 /* Loop over all the words allocated on the stack for this arg. */
3628 /* We can do it by words, because any scalar bigger than a word
3629 has a size that is a multiple of a word. */
3630 #ifndef PUSH_ARGS_REVERSED
3631 for (i = not_stack; i < size; i++)
3632 #else
3633 for (i = size - 1; i >= not_stack; i--)
3634 #endif
3635 if (i >= not_stack + offset)
3636 emit_push_insn (operand_subword_force (x, i, mode),
3637 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3638 0, args_addr,
3639 GEN_INT (args_offset + ((i - not_stack + skip)
3640 * UNITS_PER_WORD)),
3641 reg_parm_stack_space, alignment_pad);
3643 else
3645 rtx addr;
3646 rtx dest;
3648 /* Push padding now if padding above and stack grows down,
3649 or if padding below and stack grows up.
3650 But if space already allocated, this has already been done. */
3651 if (extra && args_addr == 0
3652 && where_pad != none && where_pad != stack_direction)
3653 anti_adjust_stack (GEN_INT (extra));
3655 #ifdef PUSH_ROUNDING
3656 if (args_addr == 0 && PUSH_ARGS)
3657 emit_single_push_insn (mode, x, type);
3658 else
3659 #endif
3661 if (GET_CODE (args_so_far) == CONST_INT)
3662 addr
3663 = memory_address (mode,
3664 plus_constant (args_addr,
3665 INTVAL (args_so_far)));
3666 else
3667 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3668 args_so_far));
3669 dest = gen_rtx_MEM (mode, addr);
3670 if (type != 0)
3672 set_mem_attributes (dest, type, 1);
3673 /* Function incoming arguments may overlap with sibling call
3674 outgoing arguments and we cannot allow reordering of reads
3675 from function arguments with stores to outgoing arguments
3676 of sibling calls. */
3677 set_mem_alias_set (dest, 0);
3680 emit_move_insn (dest, x);
3684 /* If part should go in registers, copy that part
3685 into the appropriate registers. Do this now, at the end,
3686 since mem-to-mem copies above may do function calls. */
3687 if (partial > 0 && reg != 0)
3689 /* Handle calls that pass values in multiple non-contiguous locations.
3690 The Irix 6 ABI has examples of this. */
3691 if (GET_CODE (reg) == PARALLEL)
3692 emit_group_load (reg, x, type, -1);
3693 else
3694 move_block_to_reg (REGNO (reg), x, partial, mode);
3697 if (extra && args_addr == 0 && where_pad == stack_direction)
3698 anti_adjust_stack (GEN_INT (extra));
3700 if (alignment_pad && args_addr == 0)
3701 anti_adjust_stack (alignment_pad);
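/* A small example of the PARTIAL handling (values chosen only for
   illustration): for a BLKmode argument occupying four words with
   PARTIAL == 2 and REG a single hard register, only the last two words
   are copied to the stack above, and the loop at the end then loads the
   first two words into REG and the following register.  Whether stack
   space is also reserved for the register part depends on
   REG_PARM_STACK_SPACE.  */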
3704 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3705 operations. */
3707 static rtx
3708 get_subtarget (rtx x)
3710 return ((x == 0
3711 /* Only registers can be subtargets. */
3712 || GET_CODE (x) != REG
3713 /* If the register is readonly, it can't be set more than once. */
3714 || RTX_UNCHANGING_P (x)
3715 /* Don't use hard regs to avoid extending their life. */
3716 || REGNO (x) < FIRST_PSEUDO_REGISTER
3717 /* Avoid subtargets inside loops,
3718 since they hide some invariant expressions. */
3719 || preserve_subexpressions_p ())
3720 ? 0 : x);
3723 /* Expand an assignment that stores the value of FROM into TO.
3724 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3725 (This may contain a QUEUED rtx;
3726 if the value is constant, this rtx is a constant.)
3727 Otherwise, the returned value is NULL_RTX. */
3730 expand_assignment (tree to, tree from, int want_value)
3732 rtx to_rtx = 0;
3733 rtx result;
3735 /* Don't crash if the lhs of the assignment was erroneous. */
3737 if (TREE_CODE (to) == ERROR_MARK)
3739 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3740 return want_value ? result : NULL_RTX;
3743 /* Assignment of a structure component needs special treatment
3744 if the structure component's rtx is not simply a MEM.
3745 Assignment of an array element at a constant index, and assignment of
3746 an array element in an unaligned packed structure field, has the same
3747 problem. */
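  /* For example, an assignment such as S.F = V, where F is a bit-field
     member of S, takes this path: get_inner_reference below recovers the
     containing object together with the bit offset and bit size of F, and
     store_field then stores V into just those bits.  (S, F and V are
     illustrative names only.)  */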
3749 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3750 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3751 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3753 enum machine_mode mode1;
3754 HOST_WIDE_INT bitsize, bitpos;
3755 rtx orig_to_rtx;
3756 tree offset;
3757 int unsignedp;
3758 int volatilep = 0;
3759 tree tem;
3761 push_temp_slots ();
3762 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3763 &unsignedp, &volatilep);
3765 /* If we are going to use store_bit_field and extract_bit_field,
3766 make sure to_rtx will be safe for multiple use. */
3768 if (mode1 == VOIDmode && want_value)
3769 tem = stabilize_reference (tem);
3771 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3773 if (offset != 0)
3775 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3777 if (GET_CODE (to_rtx) != MEM)
3778 abort ();
3780 #ifdef POINTERS_EXTEND_UNSIGNED
3781 if (GET_MODE (offset_rtx) != Pmode)
3782 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3783 #else
3784 if (GET_MODE (offset_rtx) != ptr_mode)
3785 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3786 #endif
3788       /* A constant address in TO_RTX can have VOIDmode; we must not
3789          try to call force_reg in that case, so avoid it.  */
3790 if (GET_CODE (to_rtx) == MEM
3791 && GET_MODE (to_rtx) == BLKmode
3792 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3793 && bitsize > 0
3794 && (bitpos % bitsize) == 0
3795 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3796 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3798 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3799 bitpos = 0;
3802 to_rtx = offset_address (to_rtx, offset_rtx,
3803 highest_pow2_factor_for_target (to,
3804 offset));
3807 if (GET_CODE (to_rtx) == MEM)
3809 /* If the field is at offset zero, we could have been given the
3810 DECL_RTX of the parent struct. Don't munge it. */
3811 to_rtx = shallow_copy_rtx (to_rtx);
3813 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3816 /* Deal with volatile and readonly fields. The former is only done
3817 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3818 if (volatilep && GET_CODE (to_rtx) == MEM)
3820 if (to_rtx == orig_to_rtx)
3821 to_rtx = copy_rtx (to_rtx);
3822 MEM_VOLATILE_P (to_rtx) = 1;
3825 if (TREE_CODE (to) == COMPONENT_REF
3826 && TREE_READONLY (TREE_OPERAND (to, 1))
3827 /* We can't assert that a MEM won't be set more than once
3828 if the component is not addressable because another
3829 non-addressable component may be referenced by the same MEM. */
3830 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3832 if (to_rtx == orig_to_rtx)
3833 to_rtx = copy_rtx (to_rtx);
3834 RTX_UNCHANGING_P (to_rtx) = 1;
3837 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3839 if (to_rtx == orig_to_rtx)
3840 to_rtx = copy_rtx (to_rtx);
3841 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3844 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3845 (want_value
3846 /* Spurious cast for HPUX compiler. */
3847 ? ((enum machine_mode)
3848 TYPE_MODE (TREE_TYPE (to)))
3849 : VOIDmode),
3850 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3852 preserve_temp_slots (result);
3853 free_temp_slots ();
3854 pop_temp_slots ();
3856 /* If the value is meaningful, convert RESULT to the proper mode.
3857 Otherwise, return nothing. */
3858 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3859 TYPE_MODE (TREE_TYPE (from)),
3860 result,
3861 TREE_UNSIGNED (TREE_TYPE (to)))
3862 : NULL_RTX);
3865 /* If the rhs is a function call and its value is not an aggregate,
3866 call the function before we start to compute the lhs.
3867 This is needed for correct code for cases such as
3868 val = setjmp (buf) on machines where reference to val
3869 requires loading up part of an address in a separate insn.
3871 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3872      since it might be a promoted variable where the zero- or sign-extension
3873 needs to be done. Handling this in the normal way is safe because no
3874 computation is done before the call. */
3875 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3876 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3877 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3878 && GET_CODE (DECL_RTL (to)) == REG))
3880 rtx value;
3882 push_temp_slots ();
3883 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3884 if (to_rtx == 0)
3885 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3887 /* Handle calls that return values in multiple non-contiguous locations.
3888 The Irix 6 ABI has examples of this. */
3889 if (GET_CODE (to_rtx) == PARALLEL)
3890 emit_group_load (to_rtx, value, TREE_TYPE (from),
3891 int_size_in_bytes (TREE_TYPE (from)));
3892 else if (GET_MODE (to_rtx) == BLKmode)
3893 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3894 else
3896 if (POINTER_TYPE_P (TREE_TYPE (to)))
3897 value = convert_memory_address (GET_MODE (to_rtx), value);
3898 emit_move_insn (to_rtx, value);
3900 preserve_temp_slots (to_rtx);
3901 free_temp_slots ();
3902 pop_temp_slots ();
3903 return want_value ? to_rtx : NULL_RTX;
3906 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3907 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3909 if (to_rtx == 0)
3910 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3912 /* Don't move directly into a return register. */
3913 if (TREE_CODE (to) == RESULT_DECL
3914 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3916 rtx temp;
3918 push_temp_slots ();
3919 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3921 if (GET_CODE (to_rtx) == PARALLEL)
3922 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3923 int_size_in_bytes (TREE_TYPE (from)));
3924 else
3925 emit_move_insn (to_rtx, temp);
3927 preserve_temp_slots (to_rtx);
3928 free_temp_slots ();
3929 pop_temp_slots ();
3930 return want_value ? to_rtx : NULL_RTX;
3933 /* In case we are returning the contents of an object which overlaps
3934 the place the value is being stored, use a safe function when copying
3935 a value through a pointer into a structure value return block. */
3936 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3937 && current_function_returns_struct
3938 && !current_function_returns_pcc_struct)
3940 rtx from_rtx, size;
3942 push_temp_slots ();
3943 size = expr_size (from);
3944 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3946 if (TARGET_MEM_FUNCTIONS)
3947 emit_library_call (memmove_libfunc, LCT_NORMAL,
3948 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3949 XEXP (from_rtx, 0), Pmode,
3950 convert_to_mode (TYPE_MODE (sizetype),
3951 size, TREE_UNSIGNED (sizetype)),
3952 TYPE_MODE (sizetype));
3953 else
3954 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3955 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3956 XEXP (to_rtx, 0), Pmode,
3957 convert_to_mode (TYPE_MODE (integer_type_node),
3958 size,
3959 TREE_UNSIGNED (integer_type_node)),
3960 TYPE_MODE (integer_type_node));
3962 preserve_temp_slots (to_rtx);
3963 free_temp_slots ();
3964 pop_temp_slots ();
3965 return want_value ? to_rtx : NULL_RTX;
3968 /* Compute FROM and store the value in the rtx we got. */
3970 push_temp_slots ();
3971 result = store_expr (from, to_rtx, want_value);
3972 preserve_temp_slots (result);
3973 free_temp_slots ();
3974 pop_temp_slots ();
3975 return want_value ? result : NULL_RTX;
3978 /* Generate code for computing expression EXP,
3979 and storing the value into TARGET.
3980 TARGET may contain a QUEUED rtx.
3982 If WANT_VALUE & 1 is nonzero, return a copy of the value
3983 not in TARGET, so that we can be sure to use the proper
3984 value in a containing expression even if TARGET has something
3985 else stored in it. If possible, we copy the value through a pseudo
3986 and return that pseudo. Or, if the value is constant, we try to
3987 return the constant. In some cases, we return a pseudo
3988 copied *from* TARGET.
3990 If the mode is BLKmode then we may return TARGET itself.
3991    It turns out that in BLKmode it doesn't cause a problem,
3992    because C has no operators that could combine two different
3993 assignments into the same BLKmode object with different values
3994 with no sequence point. Will other languages need this to
3995 be more thorough?
3997 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3998 to catch quickly any cases where the caller uses the value
3999 and fails to set WANT_VALUE.
4001 If WANT_VALUE & 2 is set, this is a store into a call param on the
4002 stack, and block moves may need to be treated specially. */
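   To make the bit encoding concrete: WANT_VALUE == 0 stores EXP and
   returns NULL_RTX, WANT_VALUE == 1 stores it and also returns an rtx for
   the value, WANT_VALUE == 2 stores into a call parameter on the stack
   without returning the value, and WANT_VALUE == 3 does both.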
4005 store_expr (tree exp, rtx target, int want_value)
4007 rtx temp;
4008 rtx alt_rtl = NULL_RTX;
4009 int dont_return_target = 0;
4010 int dont_store_target = 0;
4012 if (VOID_TYPE_P (TREE_TYPE (exp)))
4014 /* C++ can generate ?: expressions with a throw expression in one
4015 branch and an rvalue in the other. Here, we resolve attempts to
4016 store the throw expression's nonexistent result. */
4017 if (want_value)
4018 abort ();
4019 expand_expr (exp, const0_rtx, VOIDmode, 0);
4020 return NULL_RTX;
4022 if (TREE_CODE (exp) == COMPOUND_EXPR)
4024 /* Perform first part of compound expression, then assign from second
4025 part. */
4026 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4027 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4028 emit_queue ();
4029 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4031 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4033 /* For conditional expression, get safe form of the target. Then
4034 test the condition, doing the appropriate assignment on either
4035 side. This avoids the creation of unnecessary temporaries.
4036 For non-BLKmode, it is more efficient not to do this. */
4038 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4040 emit_queue ();
4041 target = protect_from_queue (target, 1);
4043 do_pending_stack_adjust ();
4044 NO_DEFER_POP;
4045 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4046 start_cleanup_deferral ();
4047 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4048 end_cleanup_deferral ();
4049 emit_queue ();
4050 emit_jump_insn (gen_jump (lab2));
4051 emit_barrier ();
4052 emit_label (lab1);
4053 start_cleanup_deferral ();
4054 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4055 end_cleanup_deferral ();
4056 emit_queue ();
4057 emit_label (lab2);
4058 OK_DEFER_POP;
4060 return want_value & 1 ? target : NULL_RTX;
4062 else if (queued_subexp_p (target))
4063 /* If target contains a postincrement, let's not risk
4064 using it as the place to generate the rhs. */
4066 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4068 /* Expand EXP into a new pseudo. */
4069 temp = gen_reg_rtx (GET_MODE (target));
4070 temp = expand_expr (exp, temp, GET_MODE (target),
4071 (want_value & 2
4072 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4074 else
4075 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4076 (want_value & 2
4077 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4079 /* If target is volatile, ANSI requires accessing the value
4080 *from* the target, if it is accessed. So make that happen.
4081 In no case return the target itself. */
4082 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4083 dont_return_target = 1;
4085 else if ((want_value & 1) != 0
4086 && GET_CODE (target) == MEM
4087 && ! MEM_VOLATILE_P (target)
4088 && GET_MODE (target) != BLKmode)
4089 /* If target is in memory and caller wants value in a register instead,
4090 arrange that. Pass TARGET as target for expand_expr so that,
4091 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4092 We know expand_expr will not use the target in that case.
4093 Don't do this if TARGET is volatile because we are supposed
4094 to write it and then read it. */
4096 temp = expand_expr (exp, target, GET_MODE (target),
4097 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4098 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4100 /* If TEMP is already in the desired TARGET, only copy it from
4101 memory and don't store it there again. */
4102 if (temp == target
4103 || (rtx_equal_p (temp, target)
4104 && ! side_effects_p (temp) && ! side_effects_p (target)))
4105 dont_store_target = 1;
4106 temp = copy_to_reg (temp);
4108 dont_return_target = 1;
4110 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4111 /* If this is a scalar in a register that is stored in a wider mode
4112 than the declared mode, compute the result into its declared mode
4113 and then convert to the wider mode. Our value is the computed
4114 expression. */
4116 rtx inner_target = 0;
4118 /* If we don't want a value, we can do the conversion inside EXP,
4119 which will often result in some optimizations. Do the conversion
4120 in two steps: first change the signedness, if needed, then
4121 the extend. But don't do this if the type of EXP is a subtype
4122 of something else since then the conversion might involve
4123 more than just converting modes. */
4124 if ((want_value & 1) == 0
4125 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4126 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4128 if (TREE_UNSIGNED (TREE_TYPE (exp))
4129 != SUBREG_PROMOTED_UNSIGNED_P (target))
4130 exp = convert
4131 (lang_hooks.types.signed_or_unsigned_type
4132 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4134 exp = convert (lang_hooks.types.type_for_mode
4135 (GET_MODE (SUBREG_REG (target)),
4136 SUBREG_PROMOTED_UNSIGNED_P (target)),
4137 exp);
4139 inner_target = SUBREG_REG (target);
4142 temp = expand_expr (exp, inner_target, VOIDmode,
4143 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4145 /* If TEMP is a MEM and we want a result value, make the access
4146 now so it gets done only once. Strictly speaking, this is
4147 only necessary if the MEM is volatile, or if the address
4148 overlaps TARGET. But not performing the load twice also
4149 reduces the amount of rtl we generate and then have to CSE. */
4150 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4151 temp = copy_to_reg (temp);
4153 /* If TEMP is a VOIDmode constant, use convert_modes to make
4154 sure that we properly convert it. */
4155 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4157 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4158 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4159 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4160 GET_MODE (target), temp,
4161 SUBREG_PROMOTED_UNSIGNED_P (target));
4164 convert_move (SUBREG_REG (target), temp,
4165 SUBREG_PROMOTED_UNSIGNED_P (target));
4167 /* If we promoted a constant, change the mode back down to match
4168 target. Otherwise, the caller might get confused by a result whose
4169 mode is larger than expected. */
4171 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4173 if (GET_MODE (temp) != VOIDmode)
4175 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4176 SUBREG_PROMOTED_VAR_P (temp) = 1;
4177 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4178 SUBREG_PROMOTED_UNSIGNED_P (target));
4180 else
4181 temp = convert_modes (GET_MODE (target),
4182 GET_MODE (SUBREG_REG (target)),
4183 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4186 return want_value & 1 ? temp : NULL_RTX;
4188 else
4190 temp = expand_expr_real (exp, target, GET_MODE (target),
4191 (want_value & 2
4192 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4193 &alt_rtl);
4194 /* Return TARGET if it's a specified hardware register.
4195 If TARGET is a volatile mem ref, either return TARGET
4196 or return a reg copied *from* TARGET; ANSI requires this.
4198 Otherwise, if TEMP is not TARGET, return TEMP
4199 if it is constant (for efficiency),
4200 or if we really want the correct value. */
4201 if (!(target && GET_CODE (target) == REG
4202 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4203 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4204 && ! rtx_equal_p (temp, target)
4205 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4206 dont_return_target = 1;
4209 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4210 the same as that of TARGET, adjust the constant. This is needed, for
4211 example, in case it is a CONST_DOUBLE and we want only a word-sized
4212 value. */
4213 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4214 && TREE_CODE (exp) != ERROR_MARK
4215 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4216 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4217 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4219 /* If value was not generated in the target, store it there.
4220 Convert the value to TARGET's type first if necessary.
4221 If TEMP and TARGET compare equal according to rtx_equal_p, but
4222 one or both of them are volatile memory refs, we have to distinguish
4223 two cases:
4224 - expand_expr has used TARGET. In this case, we must not generate
4225 another copy. This can be detected by TARGET being equal according
4226 to == .
4227 - expand_expr has not used TARGET - that means that the source just
4228 happens to have the same RTX form. Since temp will have been created
4229 by expand_expr, it will compare unequal according to == .
4230 We must generate a copy in this case, to reach the correct number
4231 of volatile memory references. */
4233 if ((! rtx_equal_p (temp, target)
4234 || (temp != target && (side_effects_p (temp)
4235 || side_effects_p (target))))
4236 && TREE_CODE (exp) != ERROR_MARK
4237 && ! dont_store_target
4238 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4239         but TARGET is not a valid memory reference, TEMP will differ
4240 from TARGET although it is really the same location. */
4241 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4242 /* If there's nothing to copy, don't bother. Don't call expr_size
4243         unless necessary, because the expr_size hook of some front ends
4244         (C++) aborts on objects that are not supposed to be bit-copied or
4245 bit-initialized. */
4246 && expr_size (exp) != const0_rtx)
4248       emit_queue ();
4249 target = protect_from_queue (target, 1);
4250 temp = protect_from_queue (temp, 0);
4251 if (GET_MODE (temp) != GET_MODE (target)
4252 && GET_MODE (temp) != VOIDmode)
4254 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4255 if (dont_return_target)
4257 /* In this case, we will return TEMP,
4258 so make sure it has the proper mode.
4259 But don't forget to store the value into TARGET. */
4260 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4261 emit_move_insn (target, temp);
4263 else
4264 convert_move (target, temp, unsignedp);
4267 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4269 /* Handle copying a string constant into an array. The string
4270 constant may be shorter than the array. So copy just the string's
4271 actual length, and clear the rest. First get the size of the data
4272 type of the string, which is actually the size of the target. */
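      /* A small worked example (values illustrative): for char buf[8] = "hi",
         expr_size gives 8 and TREE_STRING_LENGTH gives 3 (the text plus its
         terminating NUL), so the code below copies 3 bytes into TARGET and
         clears the remaining 5.  */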
4273 rtx size = expr_size (exp);
4275 if (GET_CODE (size) == CONST_INT
4276 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4277 emit_block_move (target, temp, size,
4278 (want_value & 2
4279 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4280 else
4282 /* Compute the size of the data to copy from the string. */
4283 tree copy_size
4284 = size_binop (MIN_EXPR,
4285 make_tree (sizetype, size),
4286 size_int (TREE_STRING_LENGTH (exp)));
4287 rtx copy_size_rtx
4288 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4289 (want_value & 2
4290 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4291 rtx label = 0;
4293 /* Copy that much. */
4294 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4295 TREE_UNSIGNED (sizetype));
4296 emit_block_move (target, temp, copy_size_rtx,
4297 (want_value & 2
4298 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4300 /* Figure out how much is left in TARGET that we have to clear.
4301 Do all calculations in ptr_mode. */
4302 if (GET_CODE (copy_size_rtx) == CONST_INT)
4304 size = plus_constant (size, -INTVAL (copy_size_rtx));
4305 target = adjust_address (target, BLKmode,
4306 INTVAL (copy_size_rtx));
4308 else
4310 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4311 copy_size_rtx, NULL_RTX, 0,
4312 OPTAB_LIB_WIDEN);
4314 #ifdef POINTERS_EXTEND_UNSIGNED
4315 if (GET_MODE (copy_size_rtx) != Pmode)
4316 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4317 TREE_UNSIGNED (sizetype));
4318 #endif
4320 target = offset_address (target, copy_size_rtx,
4321 highest_pow2_factor (copy_size));
4322 label = gen_label_rtx ();
4323 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4324 GET_MODE (size), 0, label);
4327 if (size != const0_rtx)
4328 clear_storage (target, size);
4330 if (label)
4331 emit_label (label);
4334 /* Handle calls that return values in multiple non-contiguous locations.
4335 The Irix 6 ABI has examples of this. */
4336 else if (GET_CODE (target) == PARALLEL)
4337 emit_group_load (target, temp, TREE_TYPE (exp),
4338 int_size_in_bytes (TREE_TYPE (exp)));
4339 else if (GET_MODE (temp) == BLKmode)
4340 emit_block_move (target, temp, expr_size (exp),
4341 (want_value & 2
4342 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4343 else
4345 temp = force_operand (temp, target);
4346 if (temp != target)
4347 emit_move_insn (target, temp);
4351 /* If we don't want a value, return NULL_RTX. */
4352 if ((want_value & 1) == 0)
4353 return NULL_RTX;
4355 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4356 ??? The latter test doesn't seem to make sense. */
4357 else if (dont_return_target && GET_CODE (temp) != MEM)
4358 return temp;
4360 /* Return TARGET itself if it is a hard register. */
4361 else if ((want_value & 1) != 0
4362 && GET_MODE (target) != BLKmode
4363 && ! (GET_CODE (target) == REG
4364 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4365 return copy_to_reg (target);
4367 else
4368 return target;
4371 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4373 static int
4374 is_zeros_p (tree exp)
4376 tree elt;
4378 switch (TREE_CODE (exp))
4380 case CONVERT_EXPR:
4381 case NOP_EXPR:
4382 case NON_LVALUE_EXPR:
4383 case VIEW_CONVERT_EXPR:
4384 return is_zeros_p (TREE_OPERAND (exp, 0));
4386 case INTEGER_CST:
4387 return integer_zerop (exp);
4389 case COMPLEX_CST:
4390 return
4391 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4393 case REAL_CST:
4394 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4396 case VECTOR_CST:
4397 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4398 elt = TREE_CHAIN (elt))
4399 if (!is_zeros_p (TREE_VALUE (elt)))
4400 return 0;
4402 return 1;
4404 case CONSTRUCTOR:
4405 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4406 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4407 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4408 if (! is_zeros_p (TREE_VALUE (elt)))
4409 return 0;
4411 return 1;
4413 default:
4414 return 0;
4418 /* Return 1 if EXP contains mostly (3/4) zeros. */
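/* For example, a CONSTRUCTOR with 3 zero elements out of 4 qualifies
   (4 * 3 >= 3 * 4), while one with 2 zero elements out of 4 does not
   (4 * 2 < 3 * 4).  */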
4421 mostly_zeros_p (tree exp)
4423 if (TREE_CODE (exp) == CONSTRUCTOR)
4425 int elts = 0, zeros = 0;
4426 tree elt = CONSTRUCTOR_ELTS (exp);
4427 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4429 /* If there are no ranges of true bits, it is all zero. */
4430 return elt == NULL_TREE;
4432 for (; elt; elt = TREE_CHAIN (elt))
4434 /* We do not handle the case where the index is a RANGE_EXPR,
4435 so the statistic will be somewhat inaccurate.
4436 We do make a more accurate count in store_constructor itself,
4437         and since this function is only used for nested array elements,
4438 this should be close enough. */
4439 if (mostly_zeros_p (TREE_VALUE (elt)))
4440 zeros++;
4441 elts++;
4444 return 4 * zeros >= 3 * elts;
4447 return is_zeros_p (exp);
4450 /* Helper function for store_constructor.
4451 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4452 TYPE is the type of the CONSTRUCTOR, not the element type.
4453 CLEARED is as for store_constructor.
4454 ALIAS_SET is the alias set to use for any stores.
4456 This provides a recursive shortcut back to store_constructor when it isn't
4457 necessary to go through store_field. This is so that we can pass through
4458 the cleared field to let store_constructor know that we may not have to
4459 clear a substructure if the outer structure has already been cleared. */
4461 static void
4462 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4463 HOST_WIDE_INT bitpos, enum machine_mode mode,
4464 tree exp, tree type, int cleared, int alias_set)
4466 if (TREE_CODE (exp) == CONSTRUCTOR
4467 && bitpos % BITS_PER_UNIT == 0
4468 /* If we have a nonzero bitpos for a register target, then we just
4469 let store_field do the bitfield handling. This is unlikely to
4470         generate unnecessary clear instructions anyway.  */
4471 && (bitpos == 0 || GET_CODE (target) == MEM))
4473 if (GET_CODE (target) == MEM)
4474 target
4475 = adjust_address (target,
4476 GET_MODE (target) == BLKmode
4477 || 0 != (bitpos
4478 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4479 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4482 /* Update the alias set, if required. */
4483 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4484 && MEM_ALIAS_SET (target) != 0)
4486 target = copy_rtx (target);
4487 set_mem_alias_set (target, alias_set);
4490 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4492 else
4493 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4494 alias_set);
4497 /* Store the value of constructor EXP into the rtx TARGET.
4498 TARGET is either a REG or a MEM; we know it cannot conflict, since
4499 safe_from_p has been called.
4500    CLEARED is true if TARGET is known to have been zeroed.
4501 SIZE is the number of bytes of TARGET we are allowed to modify: this
4502 may not be the same as the size of EXP if we are assigning to a field
4503 which has been packed to exclude padding bits. */
4505 static void
4506 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4508 tree type = TREE_TYPE (exp);
4509 #ifdef WORD_REGISTER_OPERATIONS
4510 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4511 #endif
4513 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4514 || TREE_CODE (type) == QUAL_UNION_TYPE)
4516 tree elt;
4518 /* If size is zero or the target is already cleared, do nothing. */
4519 if (size == 0 || cleared)
4520 cleared = 1;
4521 /* We either clear the aggregate or indicate the value is dead. */
4522 else if ((TREE_CODE (type) == UNION_TYPE
4523 || TREE_CODE (type) == QUAL_UNION_TYPE)
4524 && ! CONSTRUCTOR_ELTS (exp))
4525 /* If the constructor is empty, clear the union. */
4527 clear_storage (target, expr_size (exp));
4528 cleared = 1;
4531 /* If we are building a static constructor into a register,
4532 set the initial value as zero so we can fold the value into
4533 a constant. But if more than one register is involved,
4534 this probably loses. */
4535 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4536 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4538 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4539 cleared = 1;
4542 /* If the constructor has fewer fields than the structure
4543 or if we are initializing the structure to mostly zeros,
4544 clear the whole structure first. Don't do this if TARGET is a
4545 register whose mode size isn't equal to SIZE since clear_storage
4546 can't handle this case. */
4547 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4548 || mostly_zeros_p (exp))
4549 && (GET_CODE (target) != REG
4550 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4551 == size)))
4553 rtx xtarget = target;
4555 if (readonly_fields_p (type))
4557 xtarget = copy_rtx (xtarget);
4558 RTX_UNCHANGING_P (xtarget) = 1;
4561 clear_storage (xtarget, GEN_INT (size));
4562 cleared = 1;
4565 if (! cleared)
4566 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4568 /* Store each element of the constructor into
4569 the corresponding field of TARGET. */
4571 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4573 tree field = TREE_PURPOSE (elt);
4574 tree value = TREE_VALUE (elt);
4575 enum machine_mode mode;
4576 HOST_WIDE_INT bitsize;
4577 HOST_WIDE_INT bitpos = 0;
4578 tree offset;
4579 rtx to_rtx = target;
4581 /* Just ignore missing fields.
4582 We cleared the whole structure, above,
4583 if any fields are missing. */
4584 if (field == 0)
4585 continue;
4587 if (cleared && is_zeros_p (value))
4588 continue;
4590 if (host_integerp (DECL_SIZE (field), 1))
4591 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4592 else
4593 bitsize = -1;
4595 mode = DECL_MODE (field);
4596 if (DECL_BIT_FIELD (field))
4597 mode = VOIDmode;
4599 offset = DECL_FIELD_OFFSET (field);
4600 if (host_integerp (offset, 0)
4601 && host_integerp (bit_position (field), 0))
4603 bitpos = int_bit_position (field);
4604 offset = 0;
4606 else
4607 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4609 if (offset)
4611 rtx offset_rtx;
4613 if (CONTAINS_PLACEHOLDER_P (offset))
4614 offset = build (WITH_RECORD_EXPR, sizetype,
4615 offset, make_tree (TREE_TYPE (exp), target));
4617 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4618 if (GET_CODE (to_rtx) != MEM)
4619 abort ();
4621 #ifdef POINTERS_EXTEND_UNSIGNED
4622 if (GET_MODE (offset_rtx) != Pmode)
4623 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4624 #else
4625 if (GET_MODE (offset_rtx) != ptr_mode)
4626 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4627 #endif
4629 to_rtx = offset_address (to_rtx, offset_rtx,
4630 highest_pow2_factor (offset));
4633 if (TREE_READONLY (field))
4635 if (GET_CODE (to_rtx) == MEM)
4636 to_rtx = copy_rtx (to_rtx);
4638 RTX_UNCHANGING_P (to_rtx) = 1;
4641 #ifdef WORD_REGISTER_OPERATIONS
4642 /* If this initializes a field that is smaller than a word, at the
4643 start of a word, try to widen it to a full word.
4644 This special case allows us to output C++ member function
4645 initializations in a form that the optimizers can understand. */
4646 if (GET_CODE (target) == REG
4647 && bitsize < BITS_PER_WORD
4648 && bitpos % BITS_PER_WORD == 0
4649 && GET_MODE_CLASS (mode) == MODE_INT
4650 && TREE_CODE (value) == INTEGER_CST
4651 && exp_size >= 0
4652 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4654 tree type = TREE_TYPE (value);
4656 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4658 type = lang_hooks.types.type_for_size
4659 (BITS_PER_WORD, TREE_UNSIGNED (type));
4660 value = convert (type, value);
4663 if (BYTES_BIG_ENDIAN)
4664 value
4665 = fold (build (LSHIFT_EXPR, type, value,
4666 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4667 bitsize = BITS_PER_WORD;
4668 mode = word_mode;
4670 #endif
4672 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4673 && DECL_NONADDRESSABLE_P (field))
4675 to_rtx = copy_rtx (to_rtx);
4676 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4679 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4680 value, type, cleared,
4681 get_alias_set (TREE_TYPE (field)));
4684 else if (TREE_CODE (type) == ARRAY_TYPE
4685 || TREE_CODE (type) == VECTOR_TYPE)
4687 tree elt;
4688 int i;
4689 int need_to_clear;
4690 tree domain = TYPE_DOMAIN (type);
4691 tree elttype = TREE_TYPE (type);
4692 int const_bounds_p;
4693 HOST_WIDE_INT minelt = 0;
4694 HOST_WIDE_INT maxelt = 0;
4695 int icode = 0;
4696 rtx *vector = NULL;
4697 int elt_size = 0;
4698 unsigned n_elts = 0;
4700 /* Vectors are like arrays, but the domain is stored via an array
4701 type indirectly. */
4702 if (TREE_CODE (type) == VECTOR_TYPE)
4704 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4705 the same field as TYPE_DOMAIN, we are not guaranteed that
4706 it always will. */
4707 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4708 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4709 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4711 enum machine_mode mode = GET_MODE (target);
4713 icode = (int) vec_init_optab->handlers[mode].insn_code;
4714 if (icode != CODE_FOR_nothing)
4716 unsigned int i;
4718 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4719 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4720 vector = alloca (n_elts);
4721 for (i = 0; i < n_elts; i++)
4722 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4727 const_bounds_p = (TYPE_MIN_VALUE (domain)
4728 && TYPE_MAX_VALUE (domain)
4729 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4730 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4732 /* If we have constant bounds for the range of the type, get them. */
4733 if (const_bounds_p)
4735 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4736 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4739 /* If the constructor has fewer elements than the array,
4740         clear the whole array first.  Similarly if this is a
4741         static constructor of a non-BLKmode object.  */
4742 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4743 need_to_clear = 1;
4744 else
4746 HOST_WIDE_INT count = 0, zero_count = 0;
4747 need_to_clear = ! const_bounds_p;
4749 /* This loop is a more accurate version of the loop in
4750 mostly_zeros_p (it handles RANGE_EXPR in an index).
4751 It is also needed to check for missing elements. */
4752 for (elt = CONSTRUCTOR_ELTS (exp);
4753 elt != NULL_TREE && ! need_to_clear;
4754 elt = TREE_CHAIN (elt))
4756 tree index = TREE_PURPOSE (elt);
4757 HOST_WIDE_INT this_node_count;
4759 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4761 tree lo_index = TREE_OPERAND (index, 0);
4762 tree hi_index = TREE_OPERAND (index, 1);
4764 if (! host_integerp (lo_index, 1)
4765 || ! host_integerp (hi_index, 1))
4767 need_to_clear = 1;
4768 break;
4771 this_node_count = (tree_low_cst (hi_index, 1)
4772 - tree_low_cst (lo_index, 1) + 1);
4774 else
4775 this_node_count = 1;
4777 count += this_node_count;
4778 if (mostly_zeros_p (TREE_VALUE (elt)))
4779 zero_count += this_node_count;
4782 /* Clear the entire array first if there are any missing elements,
4783 or if the incidence of zero elements is >= 75%. */
4784 if (! need_to_clear
4785 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4786 need_to_clear = 1;
4789 if (need_to_clear && size > 0 && !vector)
4791 if (! cleared)
4793 if (REG_P (target))
4794 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4795 else
4796 clear_storage (target, GEN_INT (size));
4798 cleared = 1;
4800 else if (REG_P (target))
4801 /* Inform later passes that the old value is dead. */
4802 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4804 /* Store each element of the constructor into
4805 the corresponding element of TARGET, determined
4806 by counting the elements. */
4807 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4808 elt;
4809 elt = TREE_CHAIN (elt), i++)
4811 enum machine_mode mode;
4812 HOST_WIDE_INT bitsize;
4813 HOST_WIDE_INT bitpos;
4814 int unsignedp;
4815 tree value = TREE_VALUE (elt);
4816 tree index = TREE_PURPOSE (elt);
4817 rtx xtarget = target;
4819 if (cleared && is_zeros_p (value))
4820 continue;
4822 unsignedp = TREE_UNSIGNED (elttype);
4823 mode = TYPE_MODE (elttype);
4824 if (mode == BLKmode)
4825 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4826 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4827 : -1);
4828 else
4829 bitsize = GET_MODE_BITSIZE (mode);
4831 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4833 tree lo_index = TREE_OPERAND (index, 0);
4834 tree hi_index = TREE_OPERAND (index, 1);
4835 rtx index_r, pos_rtx, loop_end;
4836 struct nesting *loop;
4837 HOST_WIDE_INT lo, hi, count;
4838 tree position;
4840 if (vector)
4841 abort ();
4843 /* If the range is constant and "small", unroll the loop. */
4844 if (const_bounds_p
4845 && host_integerp (lo_index, 0)
4846 && host_integerp (hi_index, 0)
4847 && (lo = tree_low_cst (lo_index, 0),
4848 hi = tree_low_cst (hi_index, 0),
4849 count = hi - lo + 1,
4850 (GET_CODE (target) != MEM
4851 || count <= 2
4852 || (host_integerp (TYPE_SIZE (elttype), 1)
4853 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4854 <= 40 * 8)))))
4856 lo -= minelt; hi -= minelt;
4857 for (; lo <= hi; lo++)
4859 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4861 if (GET_CODE (target) == MEM
4862 && !MEM_KEEP_ALIAS_SET_P (target)
4863 && TREE_CODE (type) == ARRAY_TYPE
4864 && TYPE_NONALIASED_COMPONENT (type))
4866 target = copy_rtx (target);
4867 MEM_KEEP_ALIAS_SET_P (target) = 1;
4870 store_constructor_field
4871 (target, bitsize, bitpos, mode, value, type, cleared,
4872 get_alias_set (elttype));
4875 else
4877 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4878 loop_end = gen_label_rtx ();
4880 unsignedp = TREE_UNSIGNED (domain);
4882 index = build_decl (VAR_DECL, NULL_TREE, domain);
4884 index_r
4885 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4886 &unsignedp, 0));
4887 SET_DECL_RTL (index, index_r);
4888 if (TREE_CODE (value) == SAVE_EXPR
4889 && SAVE_EXPR_RTL (value) == 0)
4891 /* Make sure value gets expanded once before the
4892 loop. */
4893 expand_expr (value, const0_rtx, VOIDmode, 0);
4894 emit_queue ();
4896 store_expr (lo_index, index_r, 0);
4897 loop = expand_start_loop (0);
4899 /* Assign value to element index. */
4900 position
4901 = convert (ssizetype,
4902 fold (build (MINUS_EXPR, TREE_TYPE (index),
4903 index, TYPE_MIN_VALUE (domain))));
4904 position = size_binop (MULT_EXPR, position,
4905 convert (ssizetype,
4906 TYPE_SIZE_UNIT (elttype)));
4908 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4909 xtarget = offset_address (target, pos_rtx,
4910 highest_pow2_factor (position));
4911 xtarget = adjust_address (xtarget, mode, 0);
4912 if (TREE_CODE (value) == CONSTRUCTOR)
4913 store_constructor (value, xtarget, cleared,
4914 bitsize / BITS_PER_UNIT);
4915 else
4916 store_expr (value, xtarget, 0);
4918 expand_exit_loop_if_false (loop,
4919 build (LT_EXPR, integer_type_node,
4920 index, hi_index));
4922 expand_increment (build (PREINCREMENT_EXPR,
4923 TREE_TYPE (index),
4924 index, integer_one_node), 0, 0);
4925 expand_end_loop ();
4926 emit_label (loop_end);
4929 else if ((index != 0 && ! host_integerp (index, 0))
4930 || ! host_integerp (TYPE_SIZE (elttype), 1))
4932 tree position;
4934 if (vector)
4935 abort ();
4937 if (index == 0)
4938 index = ssize_int (1);
4940 if (minelt)
4941 index = convert (ssizetype,
4942 fold (build (MINUS_EXPR, index,
4943 TYPE_MIN_VALUE (domain))));
4945 position = size_binop (MULT_EXPR, index,
4946 convert (ssizetype,
4947 TYPE_SIZE_UNIT (elttype)));
4948 xtarget = offset_address (target,
4949 expand_expr (position, 0, VOIDmode, 0),
4950 highest_pow2_factor (position));
4951 xtarget = adjust_address (xtarget, mode, 0);
4952 store_expr (value, xtarget, 0);
4954 else if (vector)
4956 int pos;
4958 if (index != 0)
4959 pos = tree_low_cst (index, 0) - minelt;
4960 else
4961 pos = i;
4962 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4964 else
4966 if (index != 0)
4967 bitpos = ((tree_low_cst (index, 0) - minelt)
4968 * tree_low_cst (TYPE_SIZE (elttype), 1));
4969 else
4970 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4972 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4973 && TREE_CODE (type) == ARRAY_TYPE
4974 && TYPE_NONALIASED_COMPONENT (type))
4976 target = copy_rtx (target);
4977 MEM_KEEP_ALIAS_SET_P (target) = 1;
4979 store_constructor_field (target, bitsize, bitpos, mode, value,
4980 type, cleared, get_alias_set (elttype));
4983 if (vector)
4985 emit_insn (GEN_FCN (icode) (target,
4986 gen_rtx_PARALLEL (GET_MODE (target),
4987 gen_rtvec_v (n_elts, vector))));
4991 /* Set constructor assignments. */
4992 else if (TREE_CODE (type) == SET_TYPE)
4994 tree elt = CONSTRUCTOR_ELTS (exp);
4995 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4996 tree domain = TYPE_DOMAIN (type);
4997 tree domain_min, domain_max, bitlength;
4999 /* The default implementation strategy is to extract the constant
5000 parts of the constructor, use that to initialize the target,
5001 and then "or" in whatever non-constant ranges we need in addition.
5003 If a large set is all zero or all ones, it is
5004 probably better to set it using memset (if available) or bzero.
5005         Also, if a large set has just a single range, it may also be
5006         better to first clear the whole set (using
5007         bzero/memset) and then set the bits we want.  */
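      /* As an illustration of the word-packing loop below (example values):
         with an 8-bit set word size on a little-endian target, a constant part
         that sets bits 1 and 3 accumulates the word (1 << 1) | (1 << 3) == 0x0a,
         which is then written out with emit_move_insn.  */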
5009 /* Check for all zeros. */
5010 if (elt == NULL_TREE && size > 0)
5012 if (!cleared)
5013 clear_storage (target, GEN_INT (size));
5014 return;
5017 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5018 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5019 bitlength = size_binop (PLUS_EXPR,
5020 size_diffop (domain_max, domain_min),
5021 ssize_int (1));
5023 nbits = tree_low_cst (bitlength, 1);
5025 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5026 are "complicated" (more than one range), initialize (the
5027 constant parts) by copying from a constant. */
5028 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5029 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5031 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5032 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5033 char *bit_buffer = alloca (nbits);
5034 HOST_WIDE_INT word = 0;
5035 unsigned int bit_pos = 0;
5036 unsigned int ibit = 0;
5037 unsigned int offset = 0; /* In bytes from beginning of set. */
5039 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5040 for (;;)
5042 if (bit_buffer[ibit])
5044 if (BYTES_BIG_ENDIAN)
5045 word |= (1 << (set_word_size - 1 - bit_pos));
5046 else
5047 word |= 1 << bit_pos;
5050 bit_pos++; ibit++;
5051 if (bit_pos >= set_word_size || ibit == nbits)
5053 if (word != 0 || ! cleared)
5055 rtx datum = GEN_INT (word);
5056 rtx to_rtx;
5058 /* The assumption here is that it is safe to use
5059 XEXP if the set is multi-word, but not if
5060 it's single-word. */
5061 if (GET_CODE (target) == MEM)
5062 to_rtx = adjust_address (target, mode, offset);
5063 else if (offset == 0)
5064 to_rtx = target;
5065 else
5066 abort ();
5067 emit_move_insn (to_rtx, datum);
5070 if (ibit == nbits)
5071 break;
5072 word = 0;
5073 bit_pos = 0;
5074 offset += set_word_size / BITS_PER_UNIT;
5078 else if (!cleared)
5079 /* Don't bother clearing storage if the set is all ones. */
5080 if (TREE_CHAIN (elt) != NULL_TREE
5081 || (TREE_PURPOSE (elt) == NULL_TREE
5082 ? nbits != 1
5083 : ( ! host_integerp (TREE_VALUE (elt), 0)
5084 || ! host_integerp (TREE_PURPOSE (elt), 0)
5085 || (tree_low_cst (TREE_VALUE (elt), 0)
5086 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5087 != (HOST_WIDE_INT) nbits))))
5088 clear_storage (target, expr_size (exp));
5090 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5092 /* Start of range of element or NULL. */
5093 tree startbit = TREE_PURPOSE (elt);
5094 /* End of range of element, or element value. */
5095 tree endbit = TREE_VALUE (elt);
5096 HOST_WIDE_INT startb, endb;
5097 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5099 bitlength_rtx = expand_expr (bitlength,
5100 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5102 /* Handle non-range tuple element like [ expr ]. */
5103 if (startbit == NULL_TREE)
5105 startbit = save_expr (endbit);
5106 endbit = startbit;
5109 startbit = convert (sizetype, startbit);
5110 endbit = convert (sizetype, endbit);
5111 if (! integer_zerop (domain_min))
5113 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5114 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5116 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5117 EXPAND_CONST_ADDRESS);
5118 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5119 EXPAND_CONST_ADDRESS);
5121 if (REG_P (target))
5123 targetx
5124 = assign_temp
5125 ((build_qualified_type (lang_hooks.types.type_for_mode
5126 (GET_MODE (target), 0),
5127 TYPE_QUAL_CONST)),
5128 0, 1, 1);
5129 emit_move_insn (targetx, target);
5132 else if (GET_CODE (target) == MEM)
5133 targetx = target;
5134 else
5135 abort ();
5137 /* Optimization: If startbit and endbit are constants divisible
5138 by BITS_PER_UNIT, call memset instead. */
5139 if (TARGET_MEM_FUNCTIONS
5140 && TREE_CODE (startbit) == INTEGER_CST
5141 && TREE_CODE (endbit) == INTEGER_CST
5142 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5143 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5145 emit_library_call (memset_libfunc, LCT_NORMAL,
5146 VOIDmode, 3,
5147 plus_constant (XEXP (targetx, 0),
5148 startb / BITS_PER_UNIT),
5149 Pmode,
5150 constm1_rtx, TYPE_MODE (integer_type_node),
5151 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5152 TYPE_MODE (sizetype));
5154 else
5155 emit_library_call (setbits_libfunc, LCT_NORMAL,
5156 VOIDmode, 4, XEXP (targetx, 0),
5157 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5158 startbit_rtx, TYPE_MODE (sizetype),
5159 endbit_rtx, TYPE_MODE (sizetype));
5161 if (REG_P (target))
5162 emit_move_insn (target, targetx);
5166 else
5167 abort ();
5170 /* Store the value of EXP (an expression tree)
5171 into a subfield of TARGET which has mode MODE and occupies
5172 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5173 If MODE is VOIDmode, it means that we are storing into a bit-field.
5175 If VALUE_MODE is VOIDmode, return nothing in particular.
5176 UNSIGNEDP is not used in this case.
5178 Otherwise, return an rtx for the value stored. This rtx
5179 has mode VALUE_MODE if that is convenient to do.
5180 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5182    TYPE is the type of the underlying object.
5184 ALIAS_SET is the alias set for the destination. This value will
5185 (in general) be different from that for TARGET, since TARGET is a
5186 reference to the containing structure. */
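/* As a concrete, illustrative case: storing into member B of
   struct { int a : 3; int b : 5; } under a typical little-endian layout
   reaches this function with BITSIZE == 5, BITPOS == 3 and MODE == VOIDmode,
   i.e. the bit-field path below.  */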
5188 static rtx
5189 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5190 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5191 int unsignedp, tree type, int alias_set)
5193 HOST_WIDE_INT width_mask = 0;
5195 if (TREE_CODE (exp) == ERROR_MARK)
5196 return const0_rtx;
5198 /* If we have nothing to store, do nothing unless the expression has
5199 side-effects. */
5200 if (bitsize == 0)
5201 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5202 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5203 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
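    /* E.g. a 5-bit field gives WIDTH_MASK == 0x1f; it is used further down,
       when a value is wanted, to mask the stored value instead of refetching
       it from the bit-field.  */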
5205 /* If we are storing into an unaligned field of an aligned union that is
5206 in a register, we may have the mode of TARGET being an integer mode but
5207 MODE == BLKmode. In that case, get an aligned object whose size and
5208 alignment are the same as TARGET and store TARGET into it (we can avoid
5209 the store if the field being stored is the entire width of TARGET). Then
5210 call ourselves recursively to store the field into a BLKmode version of
5211 that object. Finally, load from the object into TARGET. This is not
5212 very efficient in general, but should only be slightly more expensive
5213 than the otherwise-required unaligned accesses. Perhaps this can be
5214 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5215 twice, once with emit_move_insn and once via store_field. */
5217 if (mode == BLKmode
5218 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5220 rtx object = assign_temp (type, 0, 1, 1);
5221 rtx blk_object = adjust_address (object, BLKmode, 0);
5223 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5224 emit_move_insn (object, target);
5226 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5227 alias_set);
5229 emit_move_insn (target, object);
5231 /* We want to return the BLKmode version of the data. */
5232 return blk_object;
5235 if (GET_CODE (target) == CONCAT)
5237 /* We're storing into a struct containing a single __complex. */
5239 if (bitpos != 0)
5240 abort ();
5241 return store_expr (exp, target, 0);
5244 /* If the structure is in a register or if the component
5245 is a bit field, we cannot use addressing to access it.
5246 Use bit-field techniques or SUBREG to store in it. */
5248 if (mode == VOIDmode
5249 || (mode != BLKmode && ! direct_store[(int) mode]
5250 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5251 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5252 || GET_CODE (target) == REG
5253 || GET_CODE (target) == SUBREG
5254 /* If the field isn't aligned enough to store as an ordinary memref,
5255 store it as a bit field. */
5256 || (mode != BLKmode
5257 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5258 || bitpos % GET_MODE_ALIGNMENT (mode))
5259 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5260 || (bitpos % BITS_PER_UNIT != 0)))
5261 /* If the RHS and field are a constant size and the size of the
5262 RHS isn't the same size as the bitfield, we must use bitfield
5263 operations. */
5264 || (bitsize >= 0
5265 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5266 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5268 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5270 /* If BITSIZE is narrower than the size of the type of EXP
5271 we will be narrowing TEMP. Normally, what's wanted are the
5272         low-order bits.  However, if EXP's type is a record and this is a
5273         big-endian machine, we want the upper BITSIZE bits.  */
5274 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5275 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5276 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5277 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5278 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5279 - bitsize),
5280 NULL_RTX, 1);
5282 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5283 MODE. */
5284 if (mode != VOIDmode && mode != BLKmode
5285 && mode != TYPE_MODE (TREE_TYPE (exp)))
5286 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5288 /* If the modes of TARGET and TEMP are both BLKmode, both
5289 must be in memory and BITPOS must be aligned on a byte
5290 boundary. If so, we simply do a block copy. */
5291 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5293 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5294 || bitpos % BITS_PER_UNIT != 0)
5295 abort ();
5297 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5298 emit_block_move (target, temp,
5299 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5300 / BITS_PER_UNIT),
5301 BLOCK_OP_NORMAL);
5303 return value_mode == VOIDmode ? const0_rtx : target;
5306 /* Store the value in the bitfield. */
5307 store_bit_field (target, bitsize, bitpos, mode, temp,
5308 int_size_in_bytes (type));
5310 if (value_mode != VOIDmode)
5312 /* The caller wants an rtx for the value.
5313 If possible, avoid refetching from the bitfield itself. */
5314 if (width_mask != 0
5315 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5317 tree count;
5318 enum machine_mode tmode;
5320 tmode = GET_MODE (temp);
5321 if (tmode == VOIDmode)
5322 tmode = value_mode;
5324 if (unsignedp)
5325 return expand_and (tmode, temp,
5326 gen_int_mode (width_mask, tmode),
5327 NULL_RTX);
5329 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5330 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5331 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5334 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5335 NULL_RTX, value_mode, VOIDmode,
5336 int_size_in_bytes (type));
5338 return const0_rtx;
5340 else
5342 rtx addr = XEXP (target, 0);
5343 rtx to_rtx = target;
5345 /* If a value is wanted, it must be the lhs;
5346 so make the address stable for multiple use. */
5348 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5349 && ! CONSTANT_ADDRESS_P (addr)
5350 /* A frame-pointer reference is already stable. */
5351 && ! (GET_CODE (addr) == PLUS
5352 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5353 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5354 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5355 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5357 /* Now build a reference to just the desired component. */
5359 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5361 if (to_rtx == target)
5362 to_rtx = copy_rtx (to_rtx);
5364 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5365 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5366 set_mem_alias_set (to_rtx, alias_set);
5368 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5372 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5373 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5374 codes and find the ultimate containing object, which we return.
5376 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5377 bit position, and *PUNSIGNEDP to the signedness of the field.
5378 If the position of the field is variable, we store a tree
5379 giving the variable offset (in units) in *POFFSET.
5380 This offset is in addition to the bit position.
5381 If the position is not variable, we store 0 in *POFFSET.
5383 If any of the extraction expressions is volatile,
5384 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5386 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5387 is a mode that can be used to access the field. In that case, *PBITSIZE
5388 is redundant.
5390 If the field describes a variable-sized object, *PMODE is set to
5391 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5392 this case, but the address of the object can be found. */
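/* A sketch of a typical result (names illustrative): for an expression
   such as A.B[I].C, the loop below peels the COMPONENT_REF and ARRAY_REF
   nodes and returns the declaration A as the ultimate containing object;
   the constant part of the displacement ends up in *PBITPOS and any
   variable part, such as I times the element size in bytes, in *POFFSET.  */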
5394 tree
5395 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5396 HOST_WIDE_INT *pbitpos, tree *poffset,
5397 enum machine_mode *pmode, int *punsignedp,
5398 int *pvolatilep)
5400 tree size_tree = 0;
5401 enum machine_mode mode = VOIDmode;
5402 tree offset = size_zero_node;
5403 tree bit_offset = bitsize_zero_node;
5404 tree placeholder_ptr = 0;
5405 tree tem;
5407 /* First get the mode, signedness, and size. We do this from just the
5408 outermost expression. */
5409 if (TREE_CODE (exp) == COMPONENT_REF)
5411 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5412 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5413 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5415 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5417 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5419 size_tree = TREE_OPERAND (exp, 1);
5420 *punsignedp = TREE_UNSIGNED (exp);
5422 else
5424 mode = TYPE_MODE (TREE_TYPE (exp));
5425 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5427 if (mode == BLKmode)
5428 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5429 else
5430 *pbitsize = GET_MODE_BITSIZE (mode);
5433 if (size_tree != 0)
5435 if (! host_integerp (size_tree, 1))
5436 mode = BLKmode, *pbitsize = -1;
5437 else
5438 *pbitsize = tree_low_cst (size_tree, 1);
5441 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5442 and find the ultimate containing object. */
5443 while (1)
5445 if (TREE_CODE (exp) == BIT_FIELD_REF)
5446 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5447 else if (TREE_CODE (exp) == COMPONENT_REF)
5449 tree field = TREE_OPERAND (exp, 1);
5450 tree this_offset = DECL_FIELD_OFFSET (field);
5452 /* If this field hasn't been filled in yet, don't go
5453 past it. This should only happen when folding expressions
5454 made during type construction. */
5455 if (this_offset == 0)
5456 break;
5457 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5458 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5460 offset = size_binop (PLUS_EXPR, offset, this_offset);
5461 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5462 DECL_FIELD_BIT_OFFSET (field));
5464 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5467 else if (TREE_CODE (exp) == ARRAY_REF
5468 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5470 tree index = TREE_OPERAND (exp, 1);
5471 tree array = TREE_OPERAND (exp, 0);
5472 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5473 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5474 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5476 /* We assume all arrays have sizes that are a multiple of a byte.
5477 First subtract the lower bound, if any, in the type of the
5478 index, then convert to sizetype and multiply by the size of the
5479 array element. */
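/* Worked example (an added illustration, not from the original source):
   for an array with a lower bound of 2 and 4-byte elements, a reference
   to element I contributes (I - 2) * 4 bytes to OFFSET; with the usual
   zero lower bound this is simply I * 4.  */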
5480 if (low_bound != 0 && ! integer_zerop (low_bound))
5481 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5482 index, low_bound));
5484 /* If the index has a self-referential type, pass it to a
5485 WITH_RECORD_EXPR; if the component size is self-referential,
5486 pass our component to one. */
5487 if (CONTAINS_PLACEHOLDER_P (index))
5488 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5489 if (CONTAINS_PLACEHOLDER_P (unit_size))
5490 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5492 offset = size_binop (PLUS_EXPR, offset,
5493 size_binop (MULT_EXPR,
5494 convert (sizetype, index),
5495 unit_size));
5498 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5500 tree new = find_placeholder (exp, &placeholder_ptr);
5502 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5503 We might have been called from tree optimization where we
5504 haven't set up an object yet. */
5505 if (new == 0)
5506 break;
5507 else
5508 exp = new;
5510 continue;
5513 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5514 conversions that don't change the mode, and all view conversions
5515 except those that need to "step up" the alignment. */
5516 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5517 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5518 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5519 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5520 && STRICT_ALIGNMENT
5521 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5522 < BIGGEST_ALIGNMENT)
5523 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5524 || TYPE_ALIGN_OK (TREE_TYPE
5525 (TREE_OPERAND (exp, 0))))))
5526 && ! ((TREE_CODE (exp) == NOP_EXPR
5527 || TREE_CODE (exp) == CONVERT_EXPR)
5528 && (TYPE_MODE (TREE_TYPE (exp))
5529 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5530 break;
5532 /* If any reference in the chain is volatile, the effect is volatile. */
5533 if (TREE_THIS_VOLATILE (exp))
5534 *pvolatilep = 1;
5536 exp = TREE_OPERAND (exp, 0);
5539 /* If OFFSET is constant, see if we can return the whole thing as a
5540 constant bit position. Otherwise, split it up. */
5541 if (host_integerp (offset, 0)
5542 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5543 bitsize_unit_node))
5544 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5545 && host_integerp (tem, 0))
5546 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5547 else
5548 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5550 *pmode = mode;
5551 return exp;
5554 /* Return 1 if T is an expression that get_inner_reference handles. */
5556 int
5557 handled_component_p (tree t)
5559 switch (TREE_CODE (t))
5561 case BIT_FIELD_REF:
5562 case COMPONENT_REF:
5563 case ARRAY_REF:
5564 case ARRAY_RANGE_REF:
5565 case NON_LVALUE_EXPR:
5566 case VIEW_CONVERT_EXPR:
5567 return 1;
5569 /* ??? Sure they are handled, but get_inner_reference may return
5570 a different PBITSIZE, depending upon whether the expression is
5571 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5572 case NOP_EXPR:
5573 case CONVERT_EXPR:
5574 return (TYPE_MODE (TREE_TYPE (t))
5575 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5577 default:
5578 return 0;
5582 /* Given an rtx VALUE that may contain additions and multiplications, return
5583 an equivalent value that just refers to a register, memory, or constant.
5584 This is done by generating instructions to perform the arithmetic and
5585 returning a pseudo-register containing the value.
5587 The returned value may be a REG, SUBREG, MEM or constant. */
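/* Illustrative usage sketch (added, not in the original source): callers
   use this to flatten address arithmetic into something a memory reference
   or move pattern can accept, e.g.

     rtx addr = gen_rtx_PLUS (Pmode, base_reg,
                              gen_rtx_MULT (Pmode, idx_reg, GEN_INT (4)));
     addr = force_operand (addr, NULL_RTX);

   where BASE_REG and IDX_REG are pseudo registers the caller already owns;
   the result is a pseudo (or simpler rtx) holding base + idx * 4.  */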
5589 rtx
5590 force_operand (rtx value, rtx target)
5592 rtx op1, op2;
5593 /* Use subtarget as the target for operand 0 of a binary operation. */
5594 rtx subtarget = get_subtarget (target);
5595 enum rtx_code code = GET_CODE (value);
5597 /* Check for subreg applied to an expression produced by loop optimizer. */
5598 if (code == SUBREG
5599 && GET_CODE (SUBREG_REG (value)) != REG
5600 && GET_CODE (SUBREG_REG (value)) != MEM)
5602 value = simplify_gen_subreg (GET_MODE (value),
5603 force_reg (GET_MODE (SUBREG_REG (value)),
5604 force_operand (SUBREG_REG (value),
5605 NULL_RTX)),
5606 GET_MODE (SUBREG_REG (value)),
5607 SUBREG_BYTE (value));
5608 code = GET_CODE (value);
5611 /* Check for a PIC address load. */
5612 if ((code == PLUS || code == MINUS)
5613 && XEXP (value, 0) == pic_offset_table_rtx
5614 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5615 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5616 || GET_CODE (XEXP (value, 1)) == CONST))
5618 if (!subtarget)
5619 subtarget = gen_reg_rtx (GET_MODE (value));
5620 emit_move_insn (subtarget, value);
5621 return subtarget;
5624 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5626 if (!target)
5627 target = gen_reg_rtx (GET_MODE (value));
5628 convert_move (target, force_operand (XEXP (value, 0), NULL),
5629 code == ZERO_EXTEND);
5630 return target;
5633 if (ARITHMETIC_P (value))
5635 op2 = XEXP (value, 1);
5636 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5637 subtarget = 0;
5638 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5640 code = PLUS;
5641 op2 = negate_rtx (GET_MODE (value), op2);
5644 /* Check for an addition with OP2 a constant integer and our first
5645 operand a PLUS of a virtual register and something else. In that
5646 case, we want to emit the sum of the virtual register and the
5647 constant first and then add the other value. This allows virtual
5648 register instantiation to simply modify the constant rather than
5649 creating another one around this addition. */
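/* Added illustration (not in the original source): given
   (plus (plus (reg virtual-stack-vars) (reg 70)) (const_int 8)),
   we first emit virtual-stack-vars + 8, letting virtual register
   instantiation fold the 8 into the eventual hard-register offset,
   and only then add (reg 70).  */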
5650 if (code == PLUS && GET_CODE (op2) == CONST_INT
5651 && GET_CODE (XEXP (value, 0)) == PLUS
5652 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5653 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5654 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5656 rtx temp = expand_simple_binop (GET_MODE (value), code,
5657 XEXP (XEXP (value, 0), 0), op2,
5658 subtarget, 0, OPTAB_LIB_WIDEN);
5659 return expand_simple_binop (GET_MODE (value), code, temp,
5660 force_operand (XEXP (XEXP (value,
5661 0), 1), 0),
5662 target, 0, OPTAB_LIB_WIDEN);
5665 op1 = force_operand (XEXP (value, 0), subtarget);
5666 op2 = force_operand (op2, NULL_RTX);
5667 switch (code)
5669 case MULT:
5670 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5671 case DIV:
5672 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5673 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5674 target, 1, OPTAB_LIB_WIDEN);
5675 else
5676 return expand_divmod (0,
5677 FLOAT_MODE_P (GET_MODE (value))
5678 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5679 GET_MODE (value), op1, op2, target, 0);
5680 break;
5681 case MOD:
5682 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5683 target, 0);
5684 break;
5685 case UDIV:
5686 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5687 target, 1);
5688 break;
5689 case UMOD:
5690 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5691 target, 1);
5692 break;
5693 case ASHIFTRT:
5694 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5695 target, 0, OPTAB_LIB_WIDEN);
5696 break;
5697 default:
5698 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5699 target, 1, OPTAB_LIB_WIDEN);
5702 if (UNARY_P (value))
5704 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5705 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5708 #ifdef INSN_SCHEDULING
5709 /* On machines that have insn scheduling, we want all memory references to be
5710 explicit, so we need to deal with such paradoxical SUBREGs. */
5711 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5712 && (GET_MODE_SIZE (GET_MODE (value))
5713 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5714 value
5715 = simplify_gen_subreg (GET_MODE (value),
5716 force_reg (GET_MODE (SUBREG_REG (value)),
5717 force_operand (SUBREG_REG (value),
5718 NULL_RTX)),
5719 GET_MODE (SUBREG_REG (value)),
5720 SUBREG_BYTE (value));
5721 #endif
5723 return value;
5726 /* Subroutine of expand_expr: return nonzero iff there is no way that
5727 EXP can reference X, which is being modified. TOP_P is nonzero if this
5728 call is going to be used to determine whether we need a temporary
5729 for EXP, as opposed to a recursive call to this function.
5731 It is always safe for this routine to return zero since it merely
5732 searches for optimization opportunities. */
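/* Illustrative usage sketch (added, not part of the original source):
   before reusing TARGET while expanding one operand of a binary
   expression, a caller checks that the other operand cannot refer to it:

     if (target != 0 && ! safe_from_p (target, TREE_OPERAND (exp, 1), 1))
       target = 0;

   This mirrors the guard used by expand_operands further below.  */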
5734 static int
5735 safe_from_p (rtx x, tree exp, int top_p)
5737 rtx exp_rtl = 0;
5738 int i, nops;
5739 static tree save_expr_list;
5741 if (x == 0
5742 /* If EXP has varying size, we MUST use a target since we currently
5743 have no way of allocating temporaries of variable size
5744 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5745 So we assume here that something at a higher level has prevented a
5746 clash. This is somewhat bogus, but the best we can do. Only
5747 do this when X is BLKmode and when we are at the top level. */
5748 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5749 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5750 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5751 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5752 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5753 != INTEGER_CST)
5754 && GET_MODE (x) == BLKmode)
5755 /* If X is in the outgoing argument area, it is always safe. */
5756 || (GET_CODE (x) == MEM
5757 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5758 || (GET_CODE (XEXP (x, 0)) == PLUS
5759 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5760 return 1;
5762 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5763 find the underlying pseudo. */
5764 if (GET_CODE (x) == SUBREG)
5766 x = SUBREG_REG (x);
5767 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5768 return 0;
5771 /* A SAVE_EXPR might appear many times in the expression passed to the
5772 top-level safe_from_p call, and if it has a complex subexpression,
5773 examining it multiple times could result in a combinatorial explosion.
5774 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5775 with optimization took about 28 minutes to compile -- even though it was
5776 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5777 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5778 we have processed. Note that the only test of top_p was above. */
5780 if (top_p)
5782 int rtn;
5783 tree t;
5785 save_expr_list = 0;
5787 rtn = safe_from_p (x, exp, 0);
5789 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5790 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5792 return rtn;
5795 /* Now look at our tree code and possibly recurse. */
5796 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5798 case 'd':
5799 exp_rtl = DECL_RTL_IF_SET (exp);
5800 break;
5802 case 'c':
5803 return 1;
5805 case 'x':
5806 if (TREE_CODE (exp) == TREE_LIST)
5808 while (1)
5810 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5811 return 0;
5812 exp = TREE_CHAIN (exp);
5813 if (!exp)
5814 return 1;
5815 if (TREE_CODE (exp) != TREE_LIST)
5816 return safe_from_p (x, exp, 0);
5819 else if (TREE_CODE (exp) == ERROR_MARK)
5820 return 1; /* An already-visited SAVE_EXPR? */
5821 else
5822 return 0;
5824 case '2':
5825 case '<':
5826 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5827 return 0;
5828 /* Fall through. */
5830 case '1':
5831 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5833 case 'e':
5834 case 'r':
5835 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5836 the expression. If it is set, we conflict iff we are that rtx or
5837 both are in memory. Otherwise, we check all operands of the
5838 expression recursively. */
5840 switch (TREE_CODE (exp))
5842 case ADDR_EXPR:
5843 /* If the operand is static or we are static, we can't conflict.
5844 Likewise if we don't conflict with the operand at all. */
5845 if (staticp (TREE_OPERAND (exp, 0))
5846 || TREE_STATIC (exp)
5847 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5848 return 1;
5850 /* Otherwise, the only way this can conflict is if we are taking
5851 the address of a DECL whose address is part of X, which is
5852 very rare. */
5853 exp = TREE_OPERAND (exp, 0);
5854 if (DECL_P (exp))
5856 if (!DECL_RTL_SET_P (exp)
5857 || GET_CODE (DECL_RTL (exp)) != MEM)
5858 return 0;
5859 else
5860 exp_rtl = XEXP (DECL_RTL (exp), 0);
5862 break;
5864 case INDIRECT_REF:
5865 if (GET_CODE (x) == MEM
5866 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5867 get_alias_set (exp)))
5868 return 0;
5869 break;
5871 case CALL_EXPR:
5872 /* Assume that the call will clobber all hard registers and
5873 all of memory. */
5874 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5875 || GET_CODE (x) == MEM)
5876 return 0;
5877 break;
5879 case RTL_EXPR:
5880 /* If a sequence exists, we would have to scan every instruction
5881 in the sequence to see if it was safe. This is probably not
5882 worthwhile. */
5883 if (RTL_EXPR_SEQUENCE (exp))
5884 return 0;
5886 exp_rtl = RTL_EXPR_RTL (exp);
5887 break;
5889 case WITH_CLEANUP_EXPR:
5890 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5891 break;
5893 case CLEANUP_POINT_EXPR:
5894 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5896 case SAVE_EXPR:
5897 exp_rtl = SAVE_EXPR_RTL (exp);
5898 if (exp_rtl)
5899 break;
5901 /* If we've already scanned this, don't do it again. Otherwise,
5902 show we've scanned it and record for clearing the flag if we're
5903 going on. */
5904 if (TREE_PRIVATE (exp))
5905 return 1;
5907 TREE_PRIVATE (exp) = 1;
5908 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5910 TREE_PRIVATE (exp) = 0;
5911 return 0;
5914 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5915 return 1;
5917 case BIND_EXPR:
5918 /* The only operand we look at is operand 1. The rest aren't
5919 part of the expression. */
5920 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5922 default:
5923 break;
5926 /* If we have an rtx, we do not need to scan our operands. */
5927 if (exp_rtl)
5928 break;
5930 nops = first_rtl_op (TREE_CODE (exp));
5931 for (i = 0; i < nops; i++)
5932 if (TREE_OPERAND (exp, i) != 0
5933 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5934 return 0;
5936 /* If this is a language-specific tree code, it may require
5937 special handling. */
5938 if ((unsigned int) TREE_CODE (exp)
5939 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5940 && !lang_hooks.safe_from_p (x, exp))
5941 return 0;
5944 /* If we have an rtl, find any enclosed object. Then see if we conflict
5945 with it. */
5946 if (exp_rtl)
5948 if (GET_CODE (exp_rtl) == SUBREG)
5950 exp_rtl = SUBREG_REG (exp_rtl);
5951 if (GET_CODE (exp_rtl) == REG
5952 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5953 return 0;
5956 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5957 are memory and they conflict. */
5958 return ! (rtx_equal_p (x, exp_rtl)
5959 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5960 && true_dependence (exp_rtl, VOIDmode, x,
5961 rtx_addr_varies_p)));
5964 /* If we reach here, it is safe. */
5965 return 1;
5968 /* Subroutine of expand_expr: return rtx if EXP is a
5969 variable or parameter; else return 0. */
5971 static rtx
5972 var_rtx (tree exp)
5974 STRIP_NOPS (exp);
5975 switch (TREE_CODE (exp))
5977 case PARM_DECL:
5978 case VAR_DECL:
5979 return DECL_RTL (exp);
5980 default:
5981 return 0;
5985 /* Return the highest power of two that EXP is known to be a multiple of.
5986 This is used in updating alignment of MEMs in array references. */
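/* Added illustration (not in the original source): for EXP representing
   I * 12 + 8, the MULT_EXPR case yields 1 * 4 = 4 (only the low set bit
   of 12 is known), and the PLUS_EXPR case takes MIN (4, 8) = 4, so a MEM
   indexed by this expression may be assumed 4-byte aligned.  */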
5988 static unsigned HOST_WIDE_INT
5989 highest_pow2_factor (tree exp)
5991 unsigned HOST_WIDE_INT c0, c1;
5993 switch (TREE_CODE (exp))
5995 case INTEGER_CST:
5996 /* We can find the lowest bit that's a one. If the low
5997 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5998 We need to handle this case since we can find it in a COND_EXPR,
5999 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6000 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6001 later ICE. */
6002 if (TREE_CONSTANT_OVERFLOW (exp))
6003 return BIGGEST_ALIGNMENT;
6004 else
6006 /* Note: tree_low_cst is intentionally not used here,
6007 we don't care about the upper bits. */
6008 c0 = TREE_INT_CST_LOW (exp);
6009 c0 &= -c0;
6010 return c0 ? c0 : BIGGEST_ALIGNMENT;
6012 break;
6014 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6015 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6016 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6017 return MIN (c0, c1);
6019 case MULT_EXPR:
6020 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6021 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6022 return c0 * c1;
6024 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6025 case CEIL_DIV_EXPR:
6026 if (integer_pow2p (TREE_OPERAND (exp, 1))
6027 && host_integerp (TREE_OPERAND (exp, 1), 1))
6029 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6030 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6031 return MAX (1, c0 / c1);
6033 break;
6035 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6036 case SAVE_EXPR: case WITH_RECORD_EXPR:
6037 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6039 case COMPOUND_EXPR:
6040 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6042 case COND_EXPR:
6043 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6044 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6045 return MIN (c0, c1);
6047 default:
6048 break;
6051 return 1;
6054 /* Similar, except that the alignment requirements of TARGET are
6055 taken into account. Assume it is at least as aligned as its
6056 type, unless it is a COMPONENT_REF in which case the layout of
6057 the structure gives the alignment. */
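/* Added illustration (not in the original source): if TARGET is a decl
   whose type is 8-byte aligned and EXP is I * 4, the factor from EXP
   alone is 4 but the result is MAX (4, 8) = 8, since the store target
   itself guarantees the larger alignment.  */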
6059 static unsigned HOST_WIDE_INT
6060 highest_pow2_factor_for_target (tree target, tree exp)
6062 unsigned HOST_WIDE_INT target_align, factor;
6064 factor = highest_pow2_factor (exp);
6065 if (TREE_CODE (target) == COMPONENT_REF)
6066 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6067 else
6068 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6069 return MAX (factor, target_align);
6072 /* Return an object on the placeholder list that matches EXP, a
6073 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6074 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6075 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6076 is a location which initially points to a starting location in the
6077 placeholder list (zero means start of the list) and where a pointer into
6078 the placeholder list at which the object is found is placed. */
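/* Illustrative usage sketch (added, not in the original source): the
   PLACEHOLDER_EXPR case of expand_expr below does essentially

     tree placeholder_expr = 0;
     tree object = find_placeholder (exp, &placeholder_expr);

   and then temporarily pops PLACEHOLDER_EXPR's entry off placeholder_list
   while expanding OBJECT, so that nested placeholders resolve to outer
   objects.  */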
6080 tree
6081 find_placeholder (tree exp, tree *plist)
6083 tree type = TREE_TYPE (exp);
6084 tree placeholder_expr;
6086 for (placeholder_expr
6087 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6088 placeholder_expr != 0;
6089 placeholder_expr = TREE_CHAIN (placeholder_expr))
6091 tree need_type = TYPE_MAIN_VARIANT (type);
6092 tree elt;
6094 /* Find the outermost reference that is of the type we want. If none,
6095 see if any object has a type that is a pointer to the type we
6096 want. */
6097 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6098 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6099 || TREE_CODE (elt) == COND_EXPR)
6100 ? TREE_OPERAND (elt, 1)
6101 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6102 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6103 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6104 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6105 ? TREE_OPERAND (elt, 0) : 0))
6106 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6108 if (plist)
6109 *plist = placeholder_expr;
6110 return elt;
6113 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6114 elt
6115 = ((TREE_CODE (elt) == COMPOUND_EXPR
6116 || TREE_CODE (elt) == COND_EXPR)
6117 ? TREE_OPERAND (elt, 1)
6118 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6119 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6120 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6121 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6122 ? TREE_OPERAND (elt, 0) : 0))
6123 if (POINTER_TYPE_P (TREE_TYPE (elt))
6124 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6125 == need_type))
6127 if (plist)
6128 *plist = placeholder_expr;
6129 return build1 (INDIRECT_REF, need_type, elt);
6133 return 0;
6136 /* Subroutine of expand_expr. Expand the two operands of a binary
6137 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6138 The value may be stored in TARGET if TARGET is nonzero. The
6139 MODIFIER argument is as documented by expand_expr. */
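/* Illustrative usage sketch (added, not in the original source): binary
   operator cases of expand_expr typically call

     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                      subtarget, &op0, &op1, EXPAND_NORMAL);

   and then hand OP0 and OP1 to expand_binop or a similar routine.  */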
6141 static void
6142 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6143 enum expand_modifier modifier)
6145 if (! safe_from_p (target, exp1, 1))
6146 target = 0;
6147 if (operand_equal_p (exp0, exp1, 0))
6149 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6150 *op1 = copy_rtx (*op0);
6152 else
6154 /* If we need to preserve evaluation order, copy exp0 into its own
6155 temporary variable so that it can't be clobbered by exp1. */
6156 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6157 exp0 = save_expr (exp0);
6158 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6159 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6164 /* expand_expr: generate code for computing expression EXP.
6165 An rtx for the computed value is returned. The value is never null.
6166 In the case of a void EXP, const0_rtx is returned.
6168 The value may be stored in TARGET if TARGET is nonzero.
6169 TARGET is just a suggestion; callers must assume that
6170 the rtx returned may not be the same as TARGET.
6172 If TARGET is CONST0_RTX, it means that the value will be ignored.
6174 If TMODE is not VOIDmode, it suggests generating the
6175 result in mode TMODE. But this is done only when convenient.
6176 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6177 TMODE is just a suggestion; callers must assume that
6178 the rtx returned may not have mode TMODE.
6180 Note that TARGET may have neither TMODE nor MODE. In that case, it
6181 probably will not be used.
6183 If MODIFIER is EXPAND_SUM then when EXP is an addition
6184 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6185 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6186 products as above, or REG or MEM, or constant.
6187 Ordinarily in such cases we would output mul or add instructions
6188 and then return a pseudo reg containing the sum.
6190 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6191 it also marks a label as absolutely required (it can't be dead).
6192 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6193 This is used for outputting expressions used in initializers.
6195 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6196 with a constant address even if that address is not normally legitimate.
6197 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6199 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6200 a call parameter. Such targets require special care as we haven't yet
6201 marked TARGET so that it's safe from being trashed by libcalls. We
6202 don't want to use TARGET for anything but the final result;
6203 Intermediate values must go elsewhere. Additionally, calls to
6204 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6206 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6207 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6208 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6209 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6210 recursively. */
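/* Illustrative usage sketch (added, not in the original source): most
   callers reach this code through the expand_expr wrapper, e.g.

     rtx val = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);

   which asks for the operand's value in its natural mode with no
   preferred target; expand_expr_real below does the actual work.  */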
6212 rtx
6213 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6214 enum expand_modifier modifier, rtx *alt_rtl)
6216 rtx op0, op1, temp;
6217 tree type = TREE_TYPE (exp);
6218 int unsignedp = TREE_UNSIGNED (type);
6219 enum machine_mode mode;
6220 enum tree_code code = TREE_CODE (exp);
6221 optab this_optab;
6222 rtx subtarget, original_target;
6223 int ignore;
6224 tree context;
6226 /* Handle ERROR_MARK before anybody tries to access its type. */
6227 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6229 op0 = CONST0_RTX (tmode);
6230 if (op0 != 0)
6231 return op0;
6232 return const0_rtx;
6235 mode = TYPE_MODE (type);
6236 /* Use subtarget as the target for operand 0 of a binary operation. */
6237 subtarget = get_subtarget (target);
6238 original_target = target;
6239 ignore = (target == const0_rtx
6240 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6241 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6242 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6243 && TREE_CODE (type) == VOID_TYPE));
6245 /* If we are going to ignore this result, we need only do something
6246 if there is a side-effect somewhere in the expression. If there
6247 is, short-circuit the most common cases here. Note that we must
6248 not call expand_expr with anything but const0_rtx in case this
6249 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6251 if (ignore)
6253 if (! TREE_SIDE_EFFECTS (exp))
6254 return const0_rtx;
6256 /* Ensure we reference a volatile object even if value is ignored, but
6257 don't do this if all we are doing is taking its address. */
6258 if (TREE_THIS_VOLATILE (exp)
6259 && TREE_CODE (exp) != FUNCTION_DECL
6260 && mode != VOIDmode && mode != BLKmode
6261 && modifier != EXPAND_CONST_ADDRESS)
6263 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6264 if (GET_CODE (temp) == MEM)
6265 temp = copy_to_reg (temp);
6266 return const0_rtx;
6269 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6270 || code == INDIRECT_REF || code == BUFFER_REF)
6271 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6272 modifier);
6274 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6275 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6277 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6278 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6279 return const0_rtx;
6281 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6282 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6283 /* If the second operand has no side effects, just evaluate
6284 the first. */
6285 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6286 modifier);
6287 else if (code == BIT_FIELD_REF)
6289 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6290 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6291 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6292 return const0_rtx;
6295 target = 0;
6298 /* If we will do cse, generate all results into pseudo registers
6299 since 1) that allows cse to find more things
6300 and 2) otherwise cse could produce an insn the machine
6301 cannot support. An exception is a CONSTRUCTOR into a multi-word
6302 MEM: that's much more likely to be most efficient into the MEM.
6303 Another is a CALL_EXPR which must return in memory. */
6305 if (! cse_not_expected && mode != BLKmode && target
6306 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6307 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6308 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6309 target = 0;
6311 switch (code)
6313 case LABEL_DECL:
6315 tree function = decl_function_context (exp);
6316 /* Labels in containing functions, or labels used from initializers,
6317 must be forced. */
6318 if (modifier == EXPAND_INITIALIZER
6319 || (function != current_function_decl
6320 && function != inline_function_decl
6321 && function != 0))
6322 temp = force_label_rtx (exp);
6323 else
6324 temp = label_rtx (exp);
6326 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6327 if (function != current_function_decl
6328 && function != inline_function_decl && function != 0)
6329 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6330 return temp;
6333 case PARM_DECL:
6334 if (!DECL_RTL_SET_P (exp))
6336 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6337 return CONST0_RTX (mode);
6340 /* ... fall through ... */
6342 case VAR_DECL:
6343 /* If a static var's type was incomplete when the decl was written,
6344 but the type is complete now, lay out the decl now. */
6345 if (DECL_SIZE (exp) == 0
6346 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6347 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6348 layout_decl (exp, 0);
6350 /* ... fall through ... */
6352 case FUNCTION_DECL:
6353 case RESULT_DECL:
6354 if (DECL_RTL (exp) == 0)
6355 abort ();
6357 /* Ensure variable marked as used even if it doesn't go through
6358 a parser. If it hasn't been used yet, write out an external
6359 definition. */
6360 if (! TREE_USED (exp))
6362 assemble_external (exp);
6363 TREE_USED (exp) = 1;
6366 /* Show we haven't gotten RTL for this yet. */
6367 temp = 0;
6369 /* Handle variables inherited from containing functions. */
6370 context = decl_function_context (exp);
6372 /* We treat inline_function_decl as an alias for the current function
6373 because that is the inline function whose vars, types, etc.
6374 are being merged into the current function.
6375 See expand_inline_function. */
6377 if (context != 0 && context != current_function_decl
6378 && context != inline_function_decl
6379 /* If var is static, we don't need a static chain to access it. */
6380 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6381 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6383 rtx addr;
6385 /* Mark as non-local and addressable. */
6386 DECL_NONLOCAL (exp) = 1;
6387 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6388 abort ();
6389 lang_hooks.mark_addressable (exp);
6390 if (GET_CODE (DECL_RTL (exp)) != MEM)
6391 abort ();
6392 addr = XEXP (DECL_RTL (exp), 0);
6393 if (GET_CODE (addr) == MEM)
6394 addr
6395 = replace_equiv_address (addr,
6396 fix_lexical_addr (XEXP (addr, 0), exp));
6397 else
6398 addr = fix_lexical_addr (addr, exp);
6400 temp = replace_equiv_address (DECL_RTL (exp), addr);
6403 /* This is the case of an array whose size is to be determined
6404 from its initializer, while the initializer is still being parsed.
6405 See expand_decl. */
6407 else if (GET_CODE (DECL_RTL (exp)) == MEM
6408 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6409 temp = validize_mem (DECL_RTL (exp));
6411 /* If DECL_RTL is memory, we are in the normal case; if either
6412 the address is not valid, or it is not a register and -fforce-addr
6413 is specified, get the address into a register. */
6415 else if (GET_CODE (DECL_RTL (exp)) == MEM
6416 && modifier != EXPAND_CONST_ADDRESS
6417 && modifier != EXPAND_SUM
6418 && modifier != EXPAND_INITIALIZER
6419 && (! memory_address_p (DECL_MODE (exp),
6420 XEXP (DECL_RTL (exp), 0))
6421 || (flag_force_addr
6422 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6424 if (alt_rtl)
6425 *alt_rtl = DECL_RTL (exp);
6426 temp = replace_equiv_address (DECL_RTL (exp),
6427 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6430 /* If we got something, return it. But first, set the alignment
6431 if the address is a register. */
6432 if (temp != 0)
6434 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6435 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6437 return temp;
6440 /* If the mode of DECL_RTL does not match that of the decl, it
6441 must be a promoted value. We return a SUBREG of the wanted mode,
6442 but mark it so that we know that it was already extended. */
6444 if (GET_CODE (DECL_RTL (exp)) == REG
6445 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6447 /* Get the signedness used for this variable. Ensure we get the
6448 same mode we got when the variable was declared. */
6449 if (GET_MODE (DECL_RTL (exp))
6450 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6451 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6452 abort ();
6454 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6455 SUBREG_PROMOTED_VAR_P (temp) = 1;
6456 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6457 return temp;
6460 return DECL_RTL (exp);
6462 case INTEGER_CST:
6463 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6464 TREE_INT_CST_HIGH (exp), mode);
6466 /* ??? If overflow is set, fold will have done an incomplete job,
6467 which can result in (plus xx (const_int 0)), which can get
6468 simplified by validate_replace_rtx during virtual register
6469 instantiation, which can result in unrecognizable insns.
6470 Avoid this by forcing all overflows into registers. */
6471 if (TREE_CONSTANT_OVERFLOW (exp)
6472 && modifier != EXPAND_INITIALIZER)
6473 temp = force_reg (mode, temp);
6475 return temp;
6477 case VECTOR_CST:
6478 return const_vector_from_tree (exp);
6480 case CONST_DECL:
6481 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6483 case REAL_CST:
6484 /* If optimized, generate immediate CONST_DOUBLE
6485 which will be turned into memory by reload if necessary.
6487 We used to force a register so that loop.c could see it. But
6488 this does not allow gen_* patterns to perform optimizations with
6489 the constants. It also produces two insns in cases like "x = 1.0;".
6490 On most machines, floating-point constants are not permitted in
6491 many insns, so we'd end up copying it to a register in any case.
6493 Now, we do the copying in expand_binop, if appropriate. */
6494 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6495 TYPE_MODE (TREE_TYPE (exp)));
6497 case COMPLEX_CST:
6498 /* Handle evaluating a complex constant in a CONCAT target. */
6499 if (original_target && GET_CODE (original_target) == CONCAT)
6501 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6502 rtx rtarg, itarg;
6504 rtarg = XEXP (original_target, 0);
6505 itarg = XEXP (original_target, 1);
6507 /* Move the real and imaginary parts separately. */
6508 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6509 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6511 if (op0 != rtarg)
6512 emit_move_insn (rtarg, op0);
6513 if (op1 != itarg)
6514 emit_move_insn (itarg, op1);
6516 return original_target;
6519 /* ... fall through ... */
6521 case STRING_CST:
6522 temp = output_constant_def (exp, 1);
6524 /* temp contains a constant address.
6525 On RISC machines where a constant address isn't valid,
6526 make some insns to get that address into a register. */
6527 if (modifier != EXPAND_CONST_ADDRESS
6528 && modifier != EXPAND_INITIALIZER
6529 && modifier != EXPAND_SUM
6530 && (! memory_address_p (mode, XEXP (temp, 0))
6531 || flag_force_addr))
6532 return replace_equiv_address (temp,
6533 copy_rtx (XEXP (temp, 0)));
6534 return temp;
6536 case EXPR_WITH_FILE_LOCATION:
6538 rtx to_return;
6539 struct file_stack fs;
6541 fs.location = input_location;
6542 fs.next = expr_wfl_stack;
6543 input_filename = EXPR_WFL_FILENAME (exp);
6544 input_line = EXPR_WFL_LINENO (exp);
6545 expr_wfl_stack = &fs;
6546 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6547 emit_line_note (input_location);
6548 /* Possibly avoid switching back and forth here. */
6549 to_return = expand_expr (EXPR_WFL_NODE (exp),
6550 (ignore ? const0_rtx : target),
6551 tmode, modifier);
6552 if (expr_wfl_stack != &fs)
6553 abort ();
6554 input_location = fs.location;
6555 expr_wfl_stack = fs.next;
6556 return to_return;
6559 case SAVE_EXPR:
6560 context = decl_function_context (exp);
6562 /* If this SAVE_EXPR was at global context, assume we are an
6563 initialization function and move it into our context. */
6564 if (context == 0)
6565 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6567 /* We treat inline_function_decl as an alias for the current function
6568 because that is the inline function whose vars, types, etc.
6569 are being merged into the current function.
6570 See expand_inline_function. */
6571 if (context == current_function_decl || context == inline_function_decl)
6572 context = 0;
6574 /* If this is non-local, handle it. */
6575 if (context)
6577 /* The following call just exists to abort if the context is
6578 not of a containing function. */
6579 find_function_data (context);
6581 temp = SAVE_EXPR_RTL (exp);
6582 if (temp && GET_CODE (temp) == REG)
6584 put_var_into_stack (exp, /*rescan=*/true);
6585 temp = SAVE_EXPR_RTL (exp);
6587 if (temp == 0 || GET_CODE (temp) != MEM)
6588 abort ();
6589 return
6590 replace_equiv_address (temp,
6591 fix_lexical_addr (XEXP (temp, 0), exp));
6593 if (SAVE_EXPR_RTL (exp) == 0)
6595 if (mode == VOIDmode)
6596 temp = const0_rtx;
6597 else
6598 temp = assign_temp (build_qualified_type (type,
6599 (TYPE_QUALS (type)
6600 | TYPE_QUAL_CONST)),
6601 3, 0, 0);
6603 SAVE_EXPR_RTL (exp) = temp;
6604 if (!optimize && GET_CODE (temp) == REG)
6605 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6606 save_expr_regs);
6608 /* If the mode of TEMP does not match that of the expression, it
6609 must be a promoted value. We pass store_expr a SUBREG of the
6610 wanted mode but mark it so that we know that it was already
6611 extended. */
6613 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6615 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6616 promote_mode (type, mode, &unsignedp, 0);
6617 SUBREG_PROMOTED_VAR_P (temp) = 1;
6618 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6621 if (temp == const0_rtx)
6622 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6623 else
6624 store_expr (TREE_OPERAND (exp, 0), temp,
6625 modifier == EXPAND_STACK_PARM ? 2 : 0);
6627 TREE_USED (exp) = 1;
6630 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6631 must be a promoted value. We return a SUBREG of the wanted mode,
6632 but mark it so that we know that it was already extended. */
6634 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6635 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6637 /* Compute the signedness and make the proper SUBREG. */
6638 promote_mode (type, mode, &unsignedp, 0);
6639 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6640 SUBREG_PROMOTED_VAR_P (temp) = 1;
6641 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6642 return temp;
6645 return SAVE_EXPR_RTL (exp);
6647 case UNSAVE_EXPR:
6649 rtx temp;
6650 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6651 TREE_OPERAND (exp, 0)
6652 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6653 return temp;
6656 case PLACEHOLDER_EXPR:
6658 tree old_list = placeholder_list;
6659 tree placeholder_expr = 0;
6661 exp = find_placeholder (exp, &placeholder_expr);
6662 if (exp == 0)
6663 abort ();
6665 placeholder_list = TREE_CHAIN (placeholder_expr);
6666 temp = expand_expr (exp, original_target, tmode, modifier);
6667 placeholder_list = old_list;
6668 return temp;
6671 case WITH_RECORD_EXPR:
6672 /* Put the object on the placeholder list, expand our first operand,
6673 and pop the list. */
6674 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6675 placeholder_list);
6676 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6677 modifier);
6678 placeholder_list = TREE_CHAIN (placeholder_list);
6679 return target;
6681 case GOTO_EXPR:
6682 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6683 expand_goto (TREE_OPERAND (exp, 0));
6684 else
6685 expand_computed_goto (TREE_OPERAND (exp, 0));
6686 return const0_rtx;
6688 case EXIT_EXPR:
6689 expand_exit_loop_if_false (NULL,
6690 invert_truthvalue (TREE_OPERAND (exp, 0)));
6691 return const0_rtx;
6693 case LABELED_BLOCK_EXPR:
6694 if (LABELED_BLOCK_BODY (exp))
6695 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6696 /* Should perhaps use expand_label, but this is simpler and safer. */
6697 do_pending_stack_adjust ();
6698 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6699 return const0_rtx;
6701 case EXIT_BLOCK_EXPR:
6702 if (EXIT_BLOCK_RETURN (exp))
6703 sorry ("returned value in block_exit_expr");
6704 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6705 return const0_rtx;
6707 case LOOP_EXPR:
6708 push_temp_slots ();
6709 expand_start_loop (1);
6710 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6711 expand_end_loop ();
6712 pop_temp_slots ();
6714 return const0_rtx;
6716 case BIND_EXPR:
6718 tree vars = TREE_OPERAND (exp, 0);
6720 /* Need to open a binding contour here because
6721 if there are any cleanups they must be contained here. */
6722 expand_start_bindings (2);
6724 /* Mark the corresponding BLOCK for output in its proper place. */
6725 if (TREE_OPERAND (exp, 2) != 0
6726 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6727 lang_hooks.decls.insert_block (TREE_OPERAND (exp, 2));
6729 /* If VARS have not yet been expanded, expand them now. */
6730 while (vars)
6732 if (!DECL_RTL_SET_P (vars))
6733 expand_decl (vars);
6734 expand_decl_init (vars);
6735 vars = TREE_CHAIN (vars);
6738 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6740 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6742 return temp;
6745 case RTL_EXPR:
6746 if (RTL_EXPR_SEQUENCE (exp))
6748 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6749 abort ();
6750 emit_insn (RTL_EXPR_SEQUENCE (exp));
6751 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6753 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6754 free_temps_for_rtl_expr (exp);
6755 if (alt_rtl)
6756 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6757 return RTL_EXPR_RTL (exp);
6759 case CONSTRUCTOR:
6760 /* If we don't need the result, just ensure we evaluate any
6761 subexpressions. */
6762 if (ignore)
6764 tree elt;
6766 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6767 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6769 return const0_rtx;
6772 /* All elts simple constants => refer to a constant in memory. But
6773 if this is a non-BLKmode mode, let it store a field at a time
6774 since that should make a CONST_INT or CONST_DOUBLE when we
6775 fold. Likewise, if we have a target we can use, it is best to
6776 store directly into the target unless the type is large enough
6777 that memcpy will be used. If we are making an initializer and
6778 all operands are constant, put it in memory as well.
6780 FIXME: Avoid trying to fill vector constructors piece-meal.
6781 Output them with output_constant_def below unless we're sure
6782 they're zeros. This should go away when vector initializers
6783 are treated like VECTOR_CST instead of arrays.
6785 else if ((TREE_STATIC (exp)
6786 && ((mode == BLKmode
6787 && ! (target != 0 && safe_from_p (target, exp, 1)))
6788 || TREE_ADDRESSABLE (exp)
6789 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6790 && (! MOVE_BY_PIECES_P
6791 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6792 TYPE_ALIGN (type)))
6793 && ((TREE_CODE (type) == VECTOR_TYPE
6794 && !is_zeros_p (exp))
6795 || ! mostly_zeros_p (exp)))))
6796 || ((modifier == EXPAND_INITIALIZER
6797 || modifier == EXPAND_CONST_ADDRESS)
6798 && TREE_CONSTANT (exp)))
6800 rtx constructor = output_constant_def (exp, 1);
6802 if (modifier != EXPAND_CONST_ADDRESS
6803 && modifier != EXPAND_INITIALIZER
6804 && modifier != EXPAND_SUM)
6805 constructor = validize_mem (constructor);
6807 return constructor;
6809 else
6811 /* Handle calls that pass values in multiple non-contiguous
6812 locations. The Irix 6 ABI has examples of this. */
6813 if (target == 0 || ! safe_from_p (target, exp, 1)
6814 || GET_CODE (target) == PARALLEL
6815 || modifier == EXPAND_STACK_PARM)
6816 target
6817 = assign_temp (build_qualified_type (type,
6818 (TYPE_QUALS (type)
6819 | (TREE_READONLY (exp)
6820 * TYPE_QUAL_CONST))),
6821 0, TREE_ADDRESSABLE (exp), 1);
6823 store_constructor (exp, target, 0, int_expr_size (exp));
6824 return target;
6827 case INDIRECT_REF:
6829 tree exp1 = TREE_OPERAND (exp, 0);
6830 tree index;
6831 tree string = string_constant (exp1, &index);
6833 /* Try to optimize reads from const strings. */
6834 if (string
6835 && TREE_CODE (string) == STRING_CST
6836 && TREE_CODE (index) == INTEGER_CST
6837 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6838 && GET_MODE_CLASS (mode) == MODE_INT
6839 && GET_MODE_SIZE (mode) == 1
6840 && modifier != EXPAND_WRITE)
6841 return gen_int_mode (TREE_STRING_POINTER (string)
6842 [TREE_INT_CST_LOW (index)], mode);
6844 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6845 op0 = memory_address (mode, op0);
6846 temp = gen_rtx_MEM (mode, op0);
6847 set_mem_attributes (temp, exp, 0);
6849 /* If we are writing to this object and its type is a record with
6850 readonly fields, we must mark it as readonly so it will
6851 conflict with readonly references to those fields. */
6852 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6853 RTX_UNCHANGING_P (temp) = 1;
6855 return temp;
6858 case ARRAY_REF:
6859 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6860 abort ();
6863 tree array = TREE_OPERAND (exp, 0);
6864 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6865 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6866 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6867 HOST_WIDE_INT i;
6869 /* Optimize the special-case of a zero lower bound.
6871 We convert the low_bound to sizetype to avoid some problems
6872 with constant folding. (E.g. suppose the lower bound is 1,
6873 and its mode is QI. Without the conversion, (ARRAY
6874 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6875 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6877 if (! integer_zerop (low_bound))
6878 index = size_diffop (index, convert (sizetype, low_bound));
6880 /* Fold an expression like: "foo"[2].
6881 This is not done in fold so it won't happen inside &.
6882 Don't fold if this is for wide characters since it's too
6883 difficult to do correctly and this is a very rare case. */
6885 if (modifier != EXPAND_CONST_ADDRESS
6886 && modifier != EXPAND_INITIALIZER
6887 && modifier != EXPAND_MEMORY
6888 && TREE_CODE (array) == STRING_CST
6889 && TREE_CODE (index) == INTEGER_CST
6890 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6891 && GET_MODE_CLASS (mode) == MODE_INT
6892 && GET_MODE_SIZE (mode) == 1)
6893 return gen_int_mode (TREE_STRING_POINTER (array)
6894 [TREE_INT_CST_LOW (index)], mode);
6896 /* If this is a constant index into a constant array,
6897 just get the value from the array. Handle both the cases when
6898 we have an explicit constructor and when our operand is a variable
6899 that was declared const. */
6901 if (modifier != EXPAND_CONST_ADDRESS
6902 && modifier != EXPAND_INITIALIZER
6903 && modifier != EXPAND_MEMORY
6904 && TREE_CODE (array) == CONSTRUCTOR
6905 && ! TREE_SIDE_EFFECTS (array)
6906 && TREE_CODE (index) == INTEGER_CST
6907 && 0 > compare_tree_int (index,
6908 list_length (CONSTRUCTOR_ELTS
6909 (TREE_OPERAND (exp, 0)))))
6911 tree elem;
6913 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6914 i = TREE_INT_CST_LOW (index);
6915 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6918 if (elem)
6919 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6920 modifier);
6923 else if (optimize >= 1
6924 && modifier != EXPAND_CONST_ADDRESS
6925 && modifier != EXPAND_INITIALIZER
6926 && modifier != EXPAND_MEMORY
6927 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6928 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6929 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6930 && targetm.binds_local_p (array))
6932 if (TREE_CODE (index) == INTEGER_CST)
6934 tree init = DECL_INITIAL (array);
6936 if (TREE_CODE (init) == CONSTRUCTOR)
6938 tree elem;
6940 for (elem = CONSTRUCTOR_ELTS (init);
6941 (elem
6942 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6943 elem = TREE_CHAIN (elem))
6946 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6947 return expand_expr (fold (TREE_VALUE (elem)), target,
6948 tmode, modifier);
6950 else if (TREE_CODE (init) == STRING_CST
6951 && 0 > compare_tree_int (index,
6952 TREE_STRING_LENGTH (init)))
6954 tree type = TREE_TYPE (TREE_TYPE (init));
6955 enum machine_mode mode = TYPE_MODE (type);
6957 if (GET_MODE_CLASS (mode) == MODE_INT
6958 && GET_MODE_SIZE (mode) == 1)
6959 return gen_int_mode (TREE_STRING_POINTER (init)
6960 [TREE_INT_CST_LOW (index)], mode);
6965 goto normal_inner_ref;
6967 case COMPONENT_REF:
6968 /* If the operand is a CONSTRUCTOR, we can just extract the
6969 appropriate field if it is present. */
6970 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6972 tree elt;
6974 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6975 elt = TREE_CHAIN (elt))
6976 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6977 /* We can normally use the value of the field in the
6978 CONSTRUCTOR. However, if this is a bitfield in
6979 an integral mode that we can fit in a HOST_WIDE_INT,
6980 we must mask only the number of bits in the bitfield,
6981 since this is done implicitly by the constructor. If
6982 the bitfield does not meet either of those conditions,
6983 we can't do this optimization. */
6984 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6985 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6986 == MODE_INT)
6987 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6988 <= HOST_BITS_PER_WIDE_INT))))
6990 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6991 && modifier == EXPAND_STACK_PARM)
6992 target = 0;
6993 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6994 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6996 HOST_WIDE_INT bitsize
6997 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6998 enum machine_mode imode
6999 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7001 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7003 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7004 op0 = expand_and (imode, op0, op1, target);
7006 else
7008 tree count
7009 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7010 0);
7012 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7013 target, 0);
7014 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7015 target, 0);
7019 return op0;
7022 goto normal_inner_ref;
7024 case BIT_FIELD_REF:
7025 case ARRAY_RANGE_REF:
7026 normal_inner_ref:
7028 enum machine_mode mode1;
7029 HOST_WIDE_INT bitsize, bitpos;
7030 tree offset;
7031 int volatilep = 0;
7032 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7033 &mode1, &unsignedp, &volatilep);
7034 rtx orig_op0;
7036 /* If we got back the original object, something is wrong. Perhaps
7037 we are evaluating an expression too early. In any event, don't
7038 infinitely recurse. */
7039 if (tem == exp)
7040 abort ();
7042 /* If TEM's type is a union of variable size, pass TARGET to the inner
7043 computation, since it will need a temporary and TARGET is known
7044 to have to do. This occurs in unchecked conversion in Ada. */
7046 orig_op0 = op0
7047 = expand_expr (tem,
7048 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7049 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7050 != INTEGER_CST)
7051 && modifier != EXPAND_STACK_PARM
7052 ? target : NULL_RTX),
7053 VOIDmode,
7054 (modifier == EXPAND_INITIALIZER
7055 || modifier == EXPAND_CONST_ADDRESS
7056 || modifier == EXPAND_STACK_PARM)
7057 ? modifier : EXPAND_NORMAL);
7059 /* If this is a constant, put it into a register if it is a
7060 legitimate constant and OFFSET is 0; otherwise put it in memory. */
7061 if (CONSTANT_P (op0))
7063 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7064 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7065 && offset == 0)
7066 op0 = force_reg (mode, op0);
7067 else
7068 op0 = validize_mem (force_const_mem (mode, op0));
7071 /* Otherwise, if this object is not in memory and we either have an
7072 offset or a BLKmode result, put it there. This case can't occur in
7073 C, but can in Ada if we have unchecked conversion of an expression
7074 from a scalar type to an array or record type or for an
7075 ARRAY_RANGE_REF whose type is BLKmode. */
7076 else if (GET_CODE (op0) != MEM
7077 && (offset != 0
7078 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7080 /* If the operand is a SAVE_EXPR, we can deal with this by
7081 forcing the SAVE_EXPR into memory. */
7082 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7084 put_var_into_stack (TREE_OPERAND (exp, 0),
7085 /*rescan=*/true);
7086 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7088 else
7090 tree nt
7091 = build_qualified_type (TREE_TYPE (tem),
7092 (TYPE_QUALS (TREE_TYPE (tem))
7093 | TYPE_QUAL_CONST));
7094 rtx memloc = assign_temp (nt, 1, 1, 1);
7096 emit_move_insn (memloc, op0);
7097 op0 = memloc;
7101 if (offset != 0)
7103 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7104 EXPAND_SUM);
7106 if (GET_CODE (op0) != MEM)
7107 abort ();
7109 #ifdef POINTERS_EXTEND_UNSIGNED
7110 if (GET_MODE (offset_rtx) != Pmode)
7111 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7112 #else
7113 if (GET_MODE (offset_rtx) != ptr_mode)
7114 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7115 #endif
7117 if (GET_MODE (op0) == BLKmode
7118 /* A constant address in OP0 can have VOIDmode, we must
7119 not try to call force_reg in that case. */
7120 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7121 && bitsize != 0
7122 && (bitpos % bitsize) == 0
7123 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7124 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7126 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7127 bitpos = 0;
7130 op0 = offset_address (op0, offset_rtx,
7131 highest_pow2_factor (offset));
7134 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7135 record its alignment as BIGGEST_ALIGNMENT. */
7136 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7137 && is_aligning_offset (offset, tem))
7138 set_mem_align (op0, BIGGEST_ALIGNMENT);
7140 /* Don't forget about volatility even if this is a bitfield. */
7141 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7143 if (op0 == orig_op0)
7144 op0 = copy_rtx (op0);
7146 MEM_VOLATILE_P (op0) = 1;
7149 /* The following code doesn't handle CONCAT.
7150 Assume only bitpos == 0 can be used for CONCAT, due to
7151 one-element arrays having the same mode as their element. */
7152 if (GET_CODE (op0) == CONCAT)
7154 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7155 abort ();
7156 return op0;
7159 /* In cases where an aligned union has an unaligned object
7160 as a field, we might be extracting a BLKmode value from
7161 an integer-mode (e.g., SImode) object. Handle this case
7162 by doing the extract into an object as wide as the field
7163 (which we know to be the width of a basic mode), then
7164 storing into memory, and changing the mode to BLKmode. */
7165 if (mode1 == VOIDmode
7166 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7167 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7168 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7169 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7170 && modifier != EXPAND_CONST_ADDRESS
7171 && modifier != EXPAND_INITIALIZER)
7172 /* If the field isn't aligned enough to fetch as a memref,
7173 fetch it as a bit field. */
7174 || (mode1 != BLKmode
7175 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7176 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7177 || (GET_CODE (op0) == MEM
7178 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7179 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7180 && ((modifier == EXPAND_CONST_ADDRESS
7181 || modifier == EXPAND_INITIALIZER)
7182 ? STRICT_ALIGNMENT
7183 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7184 || (bitpos % BITS_PER_UNIT != 0)))
7185 /* If the type and the field are a constant size and the
7186 size of the type isn't the same size as the bitfield,
7187 we must use bitfield operations. */
7188 || (bitsize >= 0
7189 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7190 == INTEGER_CST)
7191 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7192 bitsize)))
7194 enum machine_mode ext_mode = mode;
7196 if (ext_mode == BLKmode
7197 && ! (target != 0 && GET_CODE (op0) == MEM
7198 && GET_CODE (target) == MEM
7199 && bitpos % BITS_PER_UNIT == 0))
7200 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7202 if (ext_mode == BLKmode)
7204 if (target == 0)
7205 target = assign_temp (type, 0, 1, 1);
7207 if (bitsize == 0)
7208 return target;
7210 /* In this case, BITPOS must start at a byte boundary and
7211 TARGET, if specified, must be a MEM. */
7212 if (GET_CODE (op0) != MEM
7213 || (target != 0 && GET_CODE (target) != MEM)
7214 || bitpos % BITS_PER_UNIT != 0)
7215 abort ();
7217 emit_block_move (target,
7218 adjust_address (op0, VOIDmode,
7219 bitpos / BITS_PER_UNIT),
7220 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7221 / BITS_PER_UNIT),
7222 (modifier == EXPAND_STACK_PARM
7223 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7225 return target;
7228 op0 = validize_mem (op0);
7230 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7231 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7233 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7234 (modifier == EXPAND_STACK_PARM
7235 ? NULL_RTX : target),
7236 ext_mode, ext_mode,
7237 int_size_in_bytes (TREE_TYPE (tem)));
7239 /* If the result is a record type and BITSIZE is narrower than
7240 the mode of OP0, an integral mode, and this is a big endian
7241 machine, we must put the field into the high-order bits. */
7242 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7243 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7244 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7245 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7246 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7247 - bitsize),
7248 op0, 1);
7250 /* If the result type is BLKmode, store the data into a temporary
7251 of the appropriate type, but with the mode corresponding to the
7252 mode for the data we have (op0's mode). It's tempting to make
7253 this a constant type, since we know it's only being stored once,
7254 but that can cause problems if we are taking the address of this
7255 COMPONENT_REF because the MEM of any reference via that address
7256 will have flags corresponding to the type, which will not
7257 necessarily be constant. */
7258 if (mode == BLKmode)
7260 rtx new
7261 = assign_stack_temp_for_type
7262 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7264 emit_move_insn (new, op0);
7265 op0 = copy_rtx (new);
7266 PUT_MODE (op0, BLKmode);
7267 set_mem_attributes (op0, exp, 1);
7270 return op0;
7273 /* If the result is BLKmode, use that to access the object
7274 now as well. */
7275 if (mode == BLKmode)
7276 mode1 = BLKmode;
7278 /* Get a reference to just this component. */
7279 if (modifier == EXPAND_CONST_ADDRESS
7280 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7281 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7282 else
7283 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7285 if (op0 == orig_op0)
7286 op0 = copy_rtx (op0);
7288 set_mem_attributes (op0, exp, 0);
7289 if (GET_CODE (XEXP (op0, 0)) == REG)
7290 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7292 MEM_VOLATILE_P (op0) |= volatilep;
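/* If the reference is already in a mode the caller can use, or no
   conversion is wanted for this modifier, return it as is; otherwise
   copy it into TARGET (or a fresh register) in the requested mode.  */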
7293 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7294 || modifier == EXPAND_CONST_ADDRESS
7295 || modifier == EXPAND_INITIALIZER)
7296 return op0;
7297 else if (target == 0)
7298 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7300 convert_move (target, op0, unsignedp);
7301 return target;
7304 case VTABLE_REF:
7306 rtx insn, before = get_last_insn (), vtbl_ref;
7308 /* Evaluate the interior expression. */
7309 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7310 tmode, modifier);
7312 /* Get or create an instruction off which to hang a note. */
7313 if (REG_P (subtarget))
7315 target = subtarget;
7316 insn = get_last_insn ();
7317 if (insn == before)
7318 abort ();
7319 if (! INSN_P (insn))
7320 insn = prev_nonnote_insn (insn);
7322 else
7324 target = gen_reg_rtx (GET_MODE (subtarget));
7325 insn = emit_move_insn (target, subtarget);
7328 /* Collect the data for the note. */
7329 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7330 vtbl_ref = plus_constant (vtbl_ref,
7331 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7332 /* Discard the initial CONST that was added. */
7333 vtbl_ref = XEXP (vtbl_ref, 0);
7335 REG_NOTES (insn)
7336 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7338 return target;
7341 /* Intended for a reference to a buffer of a file-object in Pascal.
7342 But it's not certain that a special tree code will really be
7343 necessary for these. INDIRECT_REF might work for them. */
7344 case BUFFER_REF:
7345 abort ();
7347 case IN_EXPR:
7349 /* Pascal set IN expression.
7351 Algorithm:
7352 rlo = set_low - (set_low%bits_per_word);
7353 the_word = set [ (index - rlo)/bits_per_word ];
7354 bit_index = index % bits_per_word;
7355 bitmask = 1 << bit_index;
7356 return !!(the_word & bitmask); */
7358 tree set = TREE_OPERAND (exp, 0);
7359 tree index = TREE_OPERAND (exp, 1);
7360 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7361 tree set_type = TREE_TYPE (set);
7362 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7363 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7364 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7365 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7366 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7367 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7368 rtx setaddr = XEXP (setval, 0);
7369 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7370 rtx rlow;
7371 rtx diff, quo, rem, addr, bit, result;
7373 /* If domain is empty, answer is no. Likewise if index is constant
7374 and out of bounds. */
7375 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7376 && TREE_CODE (set_low_bound) == INTEGER_CST
7377 && tree_int_cst_lt (set_high_bound, set_low_bound))
7378 || (TREE_CODE (index) == INTEGER_CST
7379 && TREE_CODE (set_low_bound) == INTEGER_CST
7380 && tree_int_cst_lt (index, set_low_bound))
7381 || (TREE_CODE (set_high_bound) == INTEGER_CST
7382 && TREE_CODE (index) == INTEGER_CST
7383 && tree_int_cst_lt (set_high_bound, index))))
7384 return const0_rtx;
7386 if (target == 0)
7387 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7389 /* If we get here, we have to generate the code for both cases
7390 (in range and out of range). */
7392 op0 = gen_label_rtx ();
7393 op1 = gen_label_rtx ();
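/* Branch to the out-of-range label OP1 if the index is below the low
   bound or above the high bound; each check is skipped when both
   values are compile-time constants, because the outcome is then
   already known.  */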
7395 if (! (GET_CODE (index_val) == CONST_INT
7396 && GET_CODE (lo_r) == CONST_INT))
7397 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7398 GET_MODE (index_val), iunsignedp, op1);
7400 if (! (GET_CODE (index_val) == CONST_INT
7401 && GET_CODE (hi_r) == CONST_INT))
7402 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7403 GET_MODE (index_val), iunsignedp, op1);
7405 /* Calculate the element number of bit zero in the first word
7406 of the set. */
7407 if (GET_CODE (lo_r) == CONST_INT)
7408 rlow = GEN_INT (INTVAL (lo_r)
7409 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7410 else
7411 rlow = expand_binop (index_mode, and_optab, lo_r,
7412 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7413 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7415 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7416 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7418 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7419 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7420 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7421 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7423 addr = memory_address (byte_mode,
7424 expand_binop (index_mode, add_optab, diff,
7425 setaddr, NULL_RTX, iunsignedp,
7426 OPTAB_LIB_WIDEN));
7428 /* Extract the bit we want to examine. */
7429 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7430 gen_rtx_MEM (byte_mode, addr),
7431 make_tree (TREE_TYPE (index), rem),
7432 NULL_RTX, 1);
7433 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7434 GET_MODE (target) == byte_mode ? target : 0,
7435 1, OPTAB_LIB_WIDEN);
7437 if (result != target)
7438 convert_move (target, result, 1);
7440 /* Output the code to handle the out-of-range case. */
7441 emit_jump (op0);
7442 emit_label (op1);
7443 emit_move_insn (target, const0_rtx);
7444 emit_label (op0);
7445 return target;
7448 case WITH_CLEANUP_EXPR:
7449 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7451 WITH_CLEANUP_EXPR_RTL (exp)
7452 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7453 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7454 CLEANUP_EH_ONLY (exp));
7456 /* That's it for this cleanup. */
7457 TREE_OPERAND (exp, 1) = 0;
7459 return WITH_CLEANUP_EXPR_RTL (exp);
7461 case CLEANUP_POINT_EXPR:
7463 /* Start a new binding layer that will keep track of all cleanup
7464 actions to be performed. */
7465 expand_start_bindings (2);
7467 target_temp_slot_level = temp_slot_level;
7469 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7470 /* If we're going to use this value, load it up now. */
7471 if (! ignore)
7472 op0 = force_not_mem (op0);
7473 preserve_temp_slots (op0);
7474 expand_end_bindings (NULL_TREE, 0, 0);
7476 return op0;
7478 case CALL_EXPR:
7479 /* Check for a built-in function. */
7480 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7481 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7482 == FUNCTION_DECL)
7483 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7485 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7486 == BUILT_IN_FRONTEND)
7487 /* ??? Use (*fun) form because expand_expr is a macro. */
7488 return (*lang_hooks.expand_expr) (exp, original_target,
7489 tmode, modifier,
7490 alt_rtl);
7491 else
7492 return expand_builtin (exp, target, subtarget, tmode, ignore);
7495 return expand_call (exp, target, ignore);
7497 case NON_LVALUE_EXPR:
7498 case NOP_EXPR:
7499 case CONVERT_EXPR:
7500 case REFERENCE_EXPR:
7501 if (TREE_OPERAND (exp, 0) == error_mark_node)
7502 return const0_rtx;
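/* A conversion to a union type (e.g. a cast to union) is performed by
   storing the operand at the start of the union object and returning
   the whole union.  */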
7504 if (TREE_CODE (type) == UNION_TYPE)
7506 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7508 /* If both input and output are BLKmode, this conversion isn't doing
7509 anything except possibly changing memory attributes. */
7510 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7512 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7513 modifier);
7515 result = copy_rtx (result);
7516 set_mem_attributes (result, exp, 0);
7517 return result;
7520 if (target == 0)
7522 if (TYPE_MODE (type) != BLKmode)
7523 target = gen_reg_rtx (TYPE_MODE (type));
7524 else
7525 target = assign_temp (type, 0, 1, 1);
7528 if (GET_CODE (target) == MEM)
7529 /* Store data into beginning of memory target. */
7530 store_expr (TREE_OPERAND (exp, 0),
7531 adjust_address (target, TYPE_MODE (valtype), 0),
7532 modifier == EXPAND_STACK_PARM ? 2 : 0);
7534 else if (GET_CODE (target) == REG)
7535 /* Store this field into a union of the proper type. */
7536 store_field (target,
7537 MIN ((int_size_in_bytes (TREE_TYPE
7538 (TREE_OPERAND (exp, 0)))
7539 * BITS_PER_UNIT),
7540 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7541 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7542 VOIDmode, 0, type, 0);
7543 else
7544 abort ();
7546 /* Return the entire union. */
7547 return target;
7550 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7552 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7553 modifier);
7555 /* If the signedness of the conversion differs and OP0 is
7556 a promoted SUBREG, clear that indication since we now
7557 have to do the proper extension. */
7558 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7559 && GET_CODE (op0) == SUBREG)
7560 SUBREG_PROMOTED_VAR_P (op0) = 0;
7562 return op0;
7565 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7566 if (GET_MODE (op0) == mode)
7567 return op0;
7569 /* If OP0 is a constant, just convert it into the proper mode. */
7570 if (CONSTANT_P (op0))
7572 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7573 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7575 if (modifier == EXPAND_INITIALIZER)
7576 return simplify_gen_subreg (mode, op0, inner_mode,
7577 subreg_lowpart_offset (mode,
7578 inner_mode));
7579 else
7580 return convert_modes (mode, inner_mode, op0,
7581 TREE_UNSIGNED (inner_type));
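/* When expanding an initializer we must not emit insns, so return an
   extension rtx instead of calling convert_move.  */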
7584 if (modifier == EXPAND_INITIALIZER)
7585 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7587 if (target == 0)
7588 return
7589 convert_to_mode (mode, op0,
7590 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7591 else
7592 convert_move (target, op0,
7593 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7594 return target;
7596 case VIEW_CONVERT_EXPR:
7597 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7599 /* If the input and output modes are both the same, we are done.
7600 Otherwise, if neither mode is BLKmode and both are integral and within
7601 a word, we can use gen_lowpart. If neither is true, make sure the
7602 operand is in memory and convert the MEM to the new mode. */
7603 if (TYPE_MODE (type) == GET_MODE (op0))
7605 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7606 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7607 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7608 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7609 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7610 op0 = gen_lowpart (TYPE_MODE (type), op0);
7611 else if (GET_CODE (op0) != MEM)
7613 /* If the operand is not a MEM, force it into memory. Since we
7614 are going to be changing the mode of the MEM, don't call
7615 force_const_mem for constants because we don't allow pool
7616 constants to change mode. */
7617 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7619 if (TREE_ADDRESSABLE (exp))
7620 abort ();
7622 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7623 target
7624 = assign_stack_temp_for_type
7625 (TYPE_MODE (inner_type),
7626 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7628 emit_move_insn (target, op0);
7629 op0 = target;
7632 /* At this point, OP0 is in the correct mode. If the output type is such
7633 that the operand is known to be aligned, indicate that it is.
7634 Otherwise, we need only be concerned about alignment for non-BLKmode
7635 results. */
7636 if (GET_CODE (op0) == MEM)
7638 op0 = copy_rtx (op0);
7640 if (TYPE_ALIGN_OK (type))
7641 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7642 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7643 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7645 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7646 HOST_WIDE_INT temp_size
7647 = MAX (int_size_in_bytes (inner_type),
7648 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7649 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7650 temp_size, 0, type);
7651 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7653 if (TREE_ADDRESSABLE (exp))
7654 abort ();
7656 if (GET_MODE (op0) == BLKmode)
7657 emit_block_move (new_with_op0_mode, op0,
7658 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7659 (modifier == EXPAND_STACK_PARM
7660 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7661 else
7662 emit_move_insn (new_with_op0_mode, op0);
7664 op0 = new;
7667 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7670 return op0;
7672 case PLUS_EXPR:
7673 this_optab = ! unsignedp && flag_trapv
7674 && (GET_MODE_CLASS (mode) == MODE_INT)
7675 ? addv_optab : add_optab;
7677 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7678 something else, make sure we add the register to the constant and
7679 then to the other thing. This case can occur during strength
7680 reduction and doing it this way will produce better code if the
7681 frame pointer or argument pointer is eliminated.
7683 fold-const.c will ensure that the constant is always in the inner
7684 PLUS_EXPR, so the only case we need to do anything about is if
7685 sp, ap, or fp is our second argument, in which case we must swap
7686 the innermost first argument and our second argument. */
7688 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7689 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7690 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7691 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7692 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7693 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7695 tree t = TREE_OPERAND (exp, 1);
7697 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7698 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7701 /* If the result is to be ptr_mode and we are adding an integer to
7702 something, we might be forming a constant. So try to use
7703 plus_constant. If it produces a sum and we can't accept it,
7704 use force_operand. This allows P = &ARR[const] to generate
7705 efficient code on machines where a SYMBOL_REF is not a valid
7706 address.
7708 If this is an EXPAND_SUM call, always return the sum. */
7709 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7710 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7712 if (modifier == EXPAND_STACK_PARM)
7713 target = 0;
7714 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7715 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7716 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7718 rtx constant_part;
7720 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7721 EXPAND_SUM);
7722 /* Use immed_double_const to ensure that the constant is
7723 truncated according to the mode of OP1, then sign extended
7724 to a HOST_WIDE_INT. Using the constant directly can result
7725 in non-canonical RTL in a 64x32 cross compile. */
7726 constant_part
7727 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7728 (HOST_WIDE_INT) 0,
7729 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7730 op1 = plus_constant (op1, INTVAL (constant_part));
7731 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7732 op1 = force_operand (op1, target);
7733 return op1;
7736 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7737 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7738 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7740 rtx constant_part;
7742 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7743 (modifier == EXPAND_INITIALIZER
7744 ? EXPAND_INITIALIZER : EXPAND_SUM));
7745 if (! CONSTANT_P (op0))
7747 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7748 VOIDmode, modifier);
7749 /* Return a PLUS if modifier says it's OK. */
7750 if (modifier == EXPAND_SUM
7751 || modifier == EXPAND_INITIALIZER)
7752 return simplify_gen_binary (PLUS, mode, op0, op1);
7753 goto binop2;
7755 /* Use immed_double_const to ensure that the constant is
7756 truncated according to the mode of OP1, then sign extended
7757 to a HOST_WIDE_INT. Using the constant directly can result
7758 in non-canonical RTL in a 64x32 cross compile. */
7759 constant_part
7760 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7761 (HOST_WIDE_INT) 0,
7762 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7763 op0 = plus_constant (op0, INTVAL (constant_part));
7764 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7765 op0 = force_operand (op0, target);
7766 return op0;
7770 /* No sense saving up arithmetic to be done
7771 if it's all in the wrong mode to form part of an address.
7772 And force_operand won't know whether to sign-extend or
7773 zero-extend. */
7774 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7775 || mode != ptr_mode)
7777 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7778 subtarget, &op0, &op1, 0);
7779 if (op0 == const0_rtx)
7780 return op1;
7781 if (op1 == const0_rtx)
7782 return op0;
7783 goto binop2;
7786 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7787 subtarget, &op0, &op1, modifier);
7788 return simplify_gen_binary (PLUS, mode, op0, op1);
7790 case MINUS_EXPR:
7791 /* For initializers, we are allowed to return a MINUS of two
7792 symbolic constants. Here we handle all cases when both operands
7793 are constant. */
7794 /* Handle difference of two symbolic constants,
7795 for the sake of an initializer. */
7796 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7797 && really_constant_p (TREE_OPERAND (exp, 0))
7798 && really_constant_p (TREE_OPERAND (exp, 1)))
7800 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7801 NULL_RTX, &op0, &op1, modifier);
7803 /* If the last operand is a CONST_INT, use plus_constant of
7804 the negated constant. Else make the MINUS. */
7805 if (GET_CODE (op1) == CONST_INT)
7806 return plus_constant (op0, - INTVAL (op1));
7807 else
7808 return gen_rtx_MINUS (mode, op0, op1);
7811 this_optab = ! unsignedp && flag_trapv
7812 && (GET_MODE_CLASS(mode) == MODE_INT)
7813 ? subv_optab : sub_optab;
7815 /* No sense saving up arithmetic to be done
7816 if it's all in the wrong mode to form part of an address.
7817 And force_operand won't know whether to sign-extend or
7818 zero-extend. */
7819 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7820 || mode != ptr_mode)
7821 goto binop;
7823 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7824 subtarget, &op0, &op1, modifier);
7826 /* Convert A - const to A + (-const). */
7827 if (GET_CODE (op1) == CONST_INT)
7829 op1 = negate_rtx (mode, op1);
7830 return simplify_gen_binary (PLUS, mode, op0, op1);
7833 goto binop2;
7835 case MULT_EXPR:
7836 /* If first operand is constant, swap them.
7837 Thus the following special case checks need only
7838 check the second operand. */
7839 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7841 tree t1 = TREE_OPERAND (exp, 0);
7842 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7843 TREE_OPERAND (exp, 1) = t1;
7846 /* Attempt to return something suitable for generating an
7847 indexed address, for machines that support that. */
7849 if (modifier == EXPAND_SUM && mode == ptr_mode
7850 && host_integerp (TREE_OPERAND (exp, 1), 0))
7852 tree exp1 = TREE_OPERAND (exp, 1);
7854 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7855 EXPAND_SUM);
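/* Force the variable operand into a register so that the MULT we
   return is usable as part of an address.  */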
7857 if (GET_CODE (op0) != REG)
7858 op0 = force_operand (op0, NULL_RTX);
7859 if (GET_CODE (op0) != REG)
7860 op0 = copy_to_mode_reg (mode, op0);
7862 return gen_rtx_MULT (mode, op0,
7863 gen_int_mode (tree_low_cst (exp1, 0),
7864 TYPE_MODE (TREE_TYPE (exp1))));
7867 if (modifier == EXPAND_STACK_PARM)
7868 target = 0;
7870 /* Check for multiplying things that have been extended
7871 from a narrower type. If this machine supports multiplying
7872 in that narrower type with a result in the desired type,
7873 do it that way, and avoid the explicit type-conversion. */
7874 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7875 && TREE_CODE (type) == INTEGER_TYPE
7876 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7877 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7878 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7879 && int_fits_type_p (TREE_OPERAND (exp, 1),
7880 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7881 /* Don't use a widening multiply if a shift will do. */
7882 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7883 > HOST_BITS_PER_WIDE_INT)
7884 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7886 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7887 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7889 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7890 /* If both operands are extended, they must either both
7891 be zero-extended or both be sign-extended. */
7892 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7894 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7896 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7897 enum machine_mode innermode = TYPE_MODE (op0type);
7898 bool zextend_p = TREE_UNSIGNED (op0type);
7899 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7900 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7902 if (mode == GET_MODE_WIDER_MODE (innermode))
7904 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7906 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7907 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7908 TREE_OPERAND (exp, 1),
7909 NULL_RTX, &op0, &op1, 0);
7910 else
7911 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7912 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7913 NULL_RTX, &op0, &op1, 0);
7914 goto binop2;
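/* If only the widening multiply of the opposite signedness is
   available and the operands fill a word, use it and then correct the
   high half of the result with expand_mult_highpart_adjust.  */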
7916 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7917 && innermode == word_mode)
7919 rtx htem, hipart;
7920 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7921 NULL_RTX, VOIDmode, 0);
7922 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7923 op1 = convert_modes (innermode, mode,
7924 expand_expr (TREE_OPERAND (exp, 1),
7925 NULL_RTX, VOIDmode, 0),
7926 unsignedp);
7927 else
7928 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7929 NULL_RTX, VOIDmode, 0);
7930 temp = expand_binop (mode, other_optab, op0, op1, target,
7931 unsignedp, OPTAB_LIB_WIDEN);
7932 hipart = gen_highpart (innermode, temp);
7933 htem = expand_mult_highpart_adjust (innermode, hipart,
7934 op0, op1, hipart,
7935 zextend_p);
7936 if (htem != hipart)
7937 emit_move_insn (hipart, htem);
7938 return temp;
7942 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7943 subtarget, &op0, &op1, 0);
7944 return expand_mult (mode, op0, op1, target, unsignedp);
7946 case TRUNC_DIV_EXPR:
7947 case FLOOR_DIV_EXPR:
7948 case CEIL_DIV_EXPR:
7949 case ROUND_DIV_EXPR:
7950 case EXACT_DIV_EXPR:
7951 if (modifier == EXPAND_STACK_PARM)
7952 target = 0;
7953 /* Possible optimization: compute the dividend with EXPAND_SUM
7954 then, if the divisor is constant, we can optimize the case
7955 where some terms of the dividend have coefficients divisible by it. */
7956 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7957 subtarget, &op0, &op1, 0);
7958 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7960 case RDIV_EXPR:
7961 /* Emit a/b as a*(1/b). Later, CSE may be able to reuse the reciprocal,
7962 saving an expensive divide. If not, combine will rebuild the original
7963 computation. */
7964 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7965 && TREE_CODE (type) == REAL_TYPE
7966 && !real_onep (TREE_OPERAND (exp, 0)))
7967 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7968 build (RDIV_EXPR, type,
7969 build_real (type, dconst1),
7970 TREE_OPERAND (exp, 1))),
7971 target, tmode, modifier);
7972 this_optab = sdiv_optab;
7973 goto binop;
7975 case TRUNC_MOD_EXPR:
7976 case FLOOR_MOD_EXPR:
7977 case CEIL_MOD_EXPR:
7978 case ROUND_MOD_EXPR:
7979 if (modifier == EXPAND_STACK_PARM)
7980 target = 0;
7981 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7982 subtarget, &op0, &op1, 0);
7983 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7985 case FIX_ROUND_EXPR:
7986 case FIX_FLOOR_EXPR:
7987 case FIX_CEIL_EXPR:
7988 abort (); /* Not used for C. */
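/* Truncate a floating-point value to an integer; expand_fix emits the
   conversion into TARGET.  */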
7990 case FIX_TRUNC_EXPR:
7991 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7992 if (target == 0 || modifier == EXPAND_STACK_PARM)
7993 target = gen_reg_rtx (mode);
7994 expand_fix (target, op0, unsignedp);
7995 return target;
7997 case FLOAT_EXPR:
7998 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7999 if (target == 0 || modifier == EXPAND_STACK_PARM)
8000 target = gen_reg_rtx (mode);
8001 /* expand_float can't figure out what to do if FROM has VOIDmode.
8002 So give it the correct mode. With -O, cse will optimize this. */
8003 if (GET_MODE (op0) == VOIDmode)
8004 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8005 op0);
8006 expand_float (target, op0,
8007 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8008 return target;
8010 case NEGATE_EXPR:
8011 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8012 if (modifier == EXPAND_STACK_PARM)
8013 target = 0;
8014 temp = expand_unop (mode,
8015 ! unsignedp && flag_trapv
8016 && (GET_MODE_CLASS(mode) == MODE_INT)
8017 ? negv_optab : neg_optab, op0, target, 0);
8018 if (temp == 0)
8019 abort ();
8020 return temp;
8022 case ABS_EXPR:
8023 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8024 if (modifier == EXPAND_STACK_PARM)
8025 target = 0;
8027 /* ABS_EXPR is not valid for complex arguments. */
8028 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8029 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8030 abort ();
8032 /* Unsigned abs is simply the operand. Testing here means we don't
8033 risk generating incorrect code below. */
8034 if (TREE_UNSIGNED (type))
8035 return op0;
8037 return expand_abs (mode, op0, target, unsignedp,
8038 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8040 case MAX_EXPR:
8041 case MIN_EXPR:
8042 target = original_target;
8043 if (target == 0
8044 || modifier == EXPAND_STACK_PARM
8045 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8046 || GET_MODE (target) != mode
8047 || (GET_CODE (target) == REG
8048 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8049 target = gen_reg_rtx (mode);
8050 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8051 target, &op0, &op1, 0);
8053 /* First try to do it with a special MIN or MAX instruction.
8054 If that does not win, use a conditional jump to select the proper
8055 value. */
8056 this_optab = (unsignedp
8057 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8058 : (code == MIN_EXPR ? smin_optab : smax_optab));
8060 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8061 OPTAB_WIDEN);
8062 if (temp != 0)
8063 return temp;
8065 /* At this point, a MEM target is no longer useful; we will get better
8066 code without it. */
8068 if (GET_CODE (target) == MEM)
8069 target = gen_reg_rtx (mode);
8071 /* If op1 was placed in target, swap op0 and op1. */
8072 if (target != op0 && target == op1)
8074 rtx tem = op0;
8075 op0 = op1;
8076 op1 = tem;
8079 if (target != op0)
8080 emit_move_insn (target, op0);
8082 op0 = gen_label_rtx ();
8084 /* If this mode is an integer too wide to compare properly,
8085 compare word by word. Rely on cse to optimize constant cases. */
8086 if (GET_MODE_CLASS (mode) == MODE_INT
8087 && ! can_compare_p (GE, mode, ccp_jump))
8089 if (code == MAX_EXPR)
8090 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8091 NULL_RTX, op0);
8092 else
8093 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8094 NULL_RTX, op0);
8096 else
8098 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8099 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
8101 emit_move_insn (target, op1);
8102 emit_label (op0);
8103 return target;
8105 case BIT_NOT_EXPR:
8106 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8107 if (modifier == EXPAND_STACK_PARM)
8108 target = 0;
8109 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8110 if (temp == 0)
8111 abort ();
8112 return temp;
8114 /* ??? Can optimize bitwise operations with one arg constant.
8115 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8116 and (a bitwise1 b) bitwise2 b (etc)
8117 but that is probably not worthwhile. */
8119 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8120 boolean values when we want in all cases to compute both of them. In
8121 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8122 as actual zero-or-1 values and then bitwise anding. In cases where
8123 there cannot be any side effects, better code would be made by
8124 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8125 how to recognize those cases. */
8127 case TRUTH_AND_EXPR:
8128 case BIT_AND_EXPR:
8129 this_optab = and_optab;
8130 goto binop;
8132 case TRUTH_OR_EXPR:
8133 case BIT_IOR_EXPR:
8134 this_optab = ior_optab;
8135 goto binop;
8137 case TRUTH_XOR_EXPR:
8138 case BIT_XOR_EXPR:
8139 this_optab = xor_optab;
8140 goto binop;
8142 case LSHIFT_EXPR:
8143 case RSHIFT_EXPR:
8144 case LROTATE_EXPR:
8145 case RROTATE_EXPR:
8146 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8147 subtarget = 0;
8148 if (modifier == EXPAND_STACK_PARM)
8149 target = 0;
8150 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8151 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8152 unsignedp);
8154 /* Could determine the answer when only additive constants differ. Also,
8155 the addition of one can be handled by changing the condition. */
8156 case LT_EXPR:
8157 case LE_EXPR:
8158 case GT_EXPR:
8159 case GE_EXPR:
8160 case EQ_EXPR:
8161 case NE_EXPR:
8162 case UNORDERED_EXPR:
8163 case ORDERED_EXPR:
8164 case UNLT_EXPR:
8165 case UNLE_EXPR:
8166 case UNGT_EXPR:
8167 case UNGE_EXPR:
8168 case UNEQ_EXPR:
8169 temp = do_store_flag (exp,
8170 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8171 tmode != VOIDmode ? tmode : mode, 0);
8172 if (temp != 0)
8173 return temp;
8175 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8176 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8177 && original_target
8178 && GET_CODE (original_target) == REG
8179 && (GET_MODE (original_target)
8180 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8182 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8183 VOIDmode, 0);
8185 /* If temp is constant, we can just compute the result. */
8186 if (GET_CODE (temp) == CONST_INT)
8188 if (INTVAL (temp) != 0)
8189 emit_move_insn (target, const1_rtx);
8190 else
8191 emit_move_insn (target, const0_rtx);
8193 return target;
8196 if (temp != original_target)
8198 enum machine_mode mode1 = GET_MODE (temp);
8199 if (mode1 == VOIDmode)
8200 mode1 = tmode != VOIDmode ? tmode : mode;
8202 temp = copy_to_mode_reg (mode1, temp);
8205 op1 = gen_label_rtx ();
8206 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8207 GET_MODE (temp), unsignedp, op1);
8208 emit_move_insn (temp, const1_rtx);
8209 emit_label (op1);
8210 return temp;
8213 /* If no set-flag instruction, must generate a conditional
8214 store into a temporary variable. Drop through
8215 and handle this like && and ||. */
8217 case TRUTH_ANDIF_EXPR:
8218 case TRUTH_ORIF_EXPR:
8219 if (! ignore
8220 && (target == 0
8221 || modifier == EXPAND_STACK_PARM
8222 || ! safe_from_p (target, exp, 1)
8223 /* Make sure we don't have a hard reg (such as the function's return
8224 value) live across basic blocks, if not optimizing. */
8225 || (!optimize && GET_CODE (target) == REG
8226 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8227 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
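/* Compute the value as 0 or 1: clear TARGET, jump past the store of 1
   when the expression is false, then store 1.  */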
8229 if (target)
8230 emit_clr_insn (target);
8232 op1 = gen_label_rtx ();
8233 jumpifnot (exp, op1);
8235 if (target)
8236 emit_0_to_1_insn (target);
8238 emit_label (op1);
8239 return ignore ? const0_rtx : target;
8241 case TRUTH_NOT_EXPR:
8242 if (modifier == EXPAND_STACK_PARM)
8243 target = 0;
8244 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8245 /* The parser is careful to generate TRUTH_NOT_EXPR
8246 only with operands that are always zero or one. */
8247 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8248 target, 1, OPTAB_LIB_WIDEN);
8249 if (temp == 0)
8250 abort ();
8251 return temp;
8253 case COMPOUND_EXPR:
8254 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8255 emit_queue ();
8256 return expand_expr_real (TREE_OPERAND (exp, 1),
8257 (ignore ? const0_rtx : target),
8258 VOIDmode, modifier, alt_rtl);
8260 case COND_EXPR:
8261 /* If we would have a "singleton" (see below) were it not for a
8262 conversion in each arm, bring that conversion back out. */
8263 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8264 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8265 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8266 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8268 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8269 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8271 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8272 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8273 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8274 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8275 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8276 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8277 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8278 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8279 return expand_expr (build1 (NOP_EXPR, type,
8280 build (COND_EXPR, TREE_TYPE (iftrue),
8281 TREE_OPERAND (exp, 0),
8282 iftrue, iffalse)),
8283 target, tmode, modifier);
8287 /* Note that COND_EXPRs whose type is a structure or union
8288 are required to be constructed to contain assignments of
8289 a temporary variable, so that we can evaluate them here
8290 for side effect only. If type is void, we must do likewise. */
8292 /* If an arm of the branch requires a cleanup,
8293 only that cleanup is performed. */
8295 tree singleton = 0;
8296 tree binary_op = 0, unary_op = 0;
8298 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8299 convert it to our mode, if necessary. */
8300 if (integer_onep (TREE_OPERAND (exp, 1))
8301 && integer_zerop (TREE_OPERAND (exp, 2))
8302 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8304 if (ignore)
8306 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8307 modifier);
8308 return const0_rtx;
8311 if (modifier == EXPAND_STACK_PARM)
8312 target = 0;
8313 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8314 if (GET_MODE (op0) == mode)
8315 return op0;
8317 if (target == 0)
8318 target = gen_reg_rtx (mode);
8319 convert_move (target, op0, unsignedp);
8320 return target;
8323 /* Check for X ? A + B : A. If we have this, we can copy A to the
8324 output and conditionally add B. Similarly for unary operations.
8325 Don't do this if X has side-effects because those side effects
8326 might affect A or B and the "?" operation is a sequence point in
8327 ANSI. (operand_equal_p tests for side effects.) */
8329 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8330 && operand_equal_p (TREE_OPERAND (exp, 2),
8331 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8332 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8333 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8334 && operand_equal_p (TREE_OPERAND (exp, 1),
8335 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8336 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8337 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8338 && operand_equal_p (TREE_OPERAND (exp, 2),
8339 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8340 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8341 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8342 && operand_equal_p (TREE_OPERAND (exp, 1),
8343 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8344 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8346 /* If we are not to produce a result, we have no target. Otherwise,
8347 if a target was specified use it; it will not be used as an
8348 intermediate target unless it is safe. If no target, use a
8349 temporary. */
8351 if (ignore)
8352 temp = 0;
8353 else if (modifier == EXPAND_STACK_PARM)
8354 temp = assign_temp (type, 0, 0, 1);
8355 else if (original_target
8356 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8357 || (singleton && GET_CODE (original_target) == REG
8358 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8359 && original_target == var_rtx (singleton)))
8360 && GET_MODE (original_target) == mode
8361 #ifdef HAVE_conditional_move
8362 && (! can_conditionally_move_p (mode)
8363 || GET_CODE (original_target) == REG
8364 || TREE_ADDRESSABLE (type))
8365 #endif
8366 && (GET_CODE (original_target) != MEM
8367 || TREE_ADDRESSABLE (type)))
8368 temp = original_target;
8369 else if (TREE_ADDRESSABLE (type))
8370 abort ();
8371 else
8372 temp = assign_temp (type, 0, 0, 1);
8374 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8375 do the test of X as a store-flag operation, do this as
8376 A + ((X != 0) << log C). Similarly for other simple binary
8377 operators. Only do this for C == 1 if BRANCH_COST is low. */
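/* For instance, (x ? a + 4 : a) can be computed as
   a + ((x != 0) << 2).  */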
8378 if (temp && singleton && binary_op
8379 && (TREE_CODE (binary_op) == PLUS_EXPR
8380 || TREE_CODE (binary_op) == MINUS_EXPR
8381 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8382 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8383 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8384 : integer_onep (TREE_OPERAND (binary_op, 1)))
8385 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8387 rtx result;
8388 tree cond;
8389 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8390 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8391 ? addv_optab : add_optab)
8392 : TREE_CODE (binary_op) == MINUS_EXPR
8393 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8394 ? subv_optab : sub_optab)
8395 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8396 : xor_optab);
8398 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8399 if (singleton == TREE_OPERAND (exp, 1))
8400 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8401 else
8402 cond = TREE_OPERAND (exp, 0);
8404 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8405 ? temp : NULL_RTX),
8406 mode, BRANCH_COST <= 1);
8408 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8409 result = expand_shift (LSHIFT_EXPR, mode, result,
8410 build_int_2 (tree_log2
8411 (TREE_OPERAND
8412 (binary_op, 1)),
8414 (safe_from_p (temp, singleton, 1)
8415 ? temp : NULL_RTX), 0);
8417 if (result)
8419 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8420 return expand_binop (mode, boptab, op1, result, temp,
8421 unsignedp, OPTAB_LIB_WIDEN);
8425 do_pending_stack_adjust ();
8426 NO_DEFER_POP;
8427 op0 = gen_label_rtx ();
8429 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8431 if (temp != 0)
8433 /* If the target conflicts with the other operand of the
8434 binary op, we can't use it. Also, we can't use the target
8435 if it is a hard register, because evaluating the condition
8436 might clobber it. */
8437 if ((binary_op
8438 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8439 || (GET_CODE (temp) == REG
8440 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8441 temp = gen_reg_rtx (mode);
8442 store_expr (singleton, temp,
8443 modifier == EXPAND_STACK_PARM ? 2 : 0);
8445 else
8446 expand_expr (singleton,
8447 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8448 if (singleton == TREE_OPERAND (exp, 1))
8449 jumpif (TREE_OPERAND (exp, 0), op0);
8450 else
8451 jumpifnot (TREE_OPERAND (exp, 0), op0);
8453 start_cleanup_deferral ();
8454 if (binary_op && temp == 0)
8455 /* Just touch the other operand. */
8456 expand_expr (TREE_OPERAND (binary_op, 1),
8457 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8458 else if (binary_op)
8459 store_expr (build (TREE_CODE (binary_op), type,
8460 make_tree (type, temp),
8461 TREE_OPERAND (binary_op, 1)),
8462 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8463 else
8464 store_expr (build1 (TREE_CODE (unary_op), type,
8465 make_tree (type, temp)),
8466 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8467 op1 = op0;
8469 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8470 comparison operator. If we have one of these cases, set the
8471 output to A, branch on A (cse will merge these two references),
8472 then set the output to FOO. */
8473 else if (temp
8474 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8475 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8476 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8477 TREE_OPERAND (exp, 1), 0)
8478 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8479 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8480 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8482 if (GET_CODE (temp) == REG
8483 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8484 temp = gen_reg_rtx (mode);
8485 store_expr (TREE_OPERAND (exp, 1), temp,
8486 modifier == EXPAND_STACK_PARM ? 2 : 0);
8487 jumpif (TREE_OPERAND (exp, 0), op0);
8489 start_cleanup_deferral ();
8490 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8491 store_expr (TREE_OPERAND (exp, 2), temp,
8492 modifier == EXPAND_STACK_PARM ? 2 : 0);
8493 else
8494 expand_expr (TREE_OPERAND (exp, 2),
8495 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8496 op1 = op0;
8498 else if (temp
8499 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8500 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8501 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8502 TREE_OPERAND (exp, 2), 0)
8503 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8504 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8505 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8507 if (GET_CODE (temp) == REG
8508 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8509 temp = gen_reg_rtx (mode);
8510 store_expr (TREE_OPERAND (exp, 2), temp,
8511 modifier == EXPAND_STACK_PARM ? 2 : 0);
8512 jumpifnot (TREE_OPERAND (exp, 0), op0);
8514 start_cleanup_deferral ();
8515 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8516 store_expr (TREE_OPERAND (exp, 1), temp,
8517 modifier == EXPAND_STACK_PARM ? 2 : 0);
8518 else
8519 expand_expr (TREE_OPERAND (exp, 1),
8520 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8521 op1 = op0;
8523 else
8525 op1 = gen_label_rtx ();
8526 jumpifnot (TREE_OPERAND (exp, 0), op0);
8528 start_cleanup_deferral ();
8530 /* One branch of the cond can be void, if it never returns. For
8531 example A ? throw : E */
8532 if (temp != 0
8533 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8534 store_expr (TREE_OPERAND (exp, 1), temp,
8535 modifier == EXPAND_STACK_PARM ? 2 : 0);
8536 else
8537 expand_expr (TREE_OPERAND (exp, 1),
8538 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8539 end_cleanup_deferral ();
8540 emit_queue ();
8541 emit_jump_insn (gen_jump (op1));
8542 emit_barrier ();
8543 emit_label (op0);
8544 start_cleanup_deferral ();
8545 if (temp != 0
8546 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8547 store_expr (TREE_OPERAND (exp, 2), temp,
8548 modifier == EXPAND_STACK_PARM ? 2 : 0);
8549 else
8550 expand_expr (TREE_OPERAND (exp, 2),
8551 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8554 end_cleanup_deferral ();
8556 emit_queue ();
8557 emit_label (op1);
8558 OK_DEFER_POP;
8560 return temp;
8563 case TARGET_EXPR:
8565 /* Something needs to be initialized, but we didn't know
8566 where that thing was when building the tree. For example,
8567 it could be the return value of a function, or a parameter
8568 to a function that is laid down on the stack, or a temporary
8569 variable which must be passed by reference.
8571 We guarantee that the expression will either be constructed
8572 or copied into our original target. */
8574 tree slot = TREE_OPERAND (exp, 0);
8575 tree cleanups = NULL_TREE;
8576 tree exp1;
8578 if (TREE_CODE (slot) != VAR_DECL)
8579 abort ();
8581 if (! ignore)
8582 target = original_target;
8584 /* Set this here so that if we get a target that refers to a
8585 register variable that's already been used, put_reg_into_stack
8586 knows that it should fix up those uses. */
8587 TREE_USED (slot) = 1;
8589 if (target == 0)
8591 if (DECL_RTL_SET_P (slot))
8593 target = DECL_RTL (slot);
8594 /* If we have already expanded the slot, don't do
8595 it again. (mrs) */
8596 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8597 return target;
8599 else
8601 target = assign_temp (type, 2, 0, 1);
8602 /* All temp slots at this level must not conflict. */
8603 preserve_temp_slots (target);
8604 SET_DECL_RTL (slot, target);
8605 if (TREE_ADDRESSABLE (slot))
8606 put_var_into_stack (slot, /*rescan=*/false);
8608 /* Since SLOT is not known to the called function
8609 to belong to its stack frame, we must build an explicit
8610 cleanup. This case occurs when we must build up a reference
8611 to pass the reference as an argument. In this case,
8612 it is very likely that such a reference need not be
8613 built here. */
8615 if (TREE_OPERAND (exp, 2) == 0)
8616 TREE_OPERAND (exp, 2)
8617 = lang_hooks.maybe_build_cleanup (slot);
8618 cleanups = TREE_OPERAND (exp, 2);
8621 else
8623 /* This case does occur when expanding a parameter which
8624 needs to be constructed on the stack. The target
8625 is the actual stack address that we want to initialize.
8626 The function we call will perform the cleanup in this case. */
8628 /* If we have already assigned it space, use that space,
8629 not the target that we were passed in, as our target
8630 parameter is only a hint. */
8631 if (DECL_RTL_SET_P (slot))
8633 target = DECL_RTL (slot);
8634 /* If we have already expanded the slot, don't do
8635 it again. (mrs) */
8636 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8637 return target;
8639 else
8641 SET_DECL_RTL (slot, target);
8642 /* If we must have an addressable slot, then make sure that
8643 the RTL that we just stored in slot is OK. */
8644 if (TREE_ADDRESSABLE (slot))
8645 put_var_into_stack (slot, /*rescan=*/true);
8649 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8650 /* Mark it as expanded. */
8651 TREE_OPERAND (exp, 1) = NULL_TREE;
8653 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8655 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8657 return target;
8660 case INIT_EXPR:
8662 tree lhs = TREE_OPERAND (exp, 0);
8663 tree rhs = TREE_OPERAND (exp, 1);
8665 temp = expand_assignment (lhs, rhs, ! ignore);
8666 return temp;
8669 case MODIFY_EXPR:
8671 /* If lhs is complex, expand calls in rhs before computing it.
8672 That's so we don't compute a pointer and save it over a
8673 call. If lhs is simple, compute it first so we can give it
8674 as a target if the rhs is just a call. This avoids an
8675 extra temp and copy and that prevents a partial-subsumption
8676 which makes bad code. Actually we could treat
8677 component_ref's of vars like vars. */
8679 tree lhs = TREE_OPERAND (exp, 0);
8680 tree rhs = TREE_OPERAND (exp, 1);
8682 temp = 0;
8684 /* Check for |= or &= of a bitfield of size one into another bitfield
8685 of size 1. In this case, (unless we need the result of the
8686 assignment) we can do this more efficiently with a
8687 test followed by an assignment, if necessary.
8689 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8690 things change so we do, this code should be enhanced to
8691 support it. */
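/* A sketch, assuming one-bit fields x.a and y.b:
     x.a |= y.b;  is emitted as  if (y.b) x.a = 1;
     x.a &= y.b;  is emitted as  if (!y.b) x.a = 0;  */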
8692 if (ignore
8693 && TREE_CODE (lhs) == COMPONENT_REF
8694 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8695 || TREE_CODE (rhs) == BIT_AND_EXPR)
8696 && TREE_OPERAND (rhs, 0) == lhs
8697 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8698 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8699 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8701 rtx label = gen_label_rtx ();
8703 do_jump (TREE_OPERAND (rhs, 1),
8704 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8705 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8706 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8707 (TREE_CODE (rhs) == BIT_IOR_EXPR
8708 ? integer_one_node
8709 : integer_zero_node)),
8711 do_pending_stack_adjust ();
8712 emit_label (label);
8713 return const0_rtx;
8716 temp = expand_assignment (lhs, rhs, ! ignore);
8718 return temp;
8721 case RETURN_EXPR:
8722 if (!TREE_OPERAND (exp, 0))
8723 expand_null_return ();
8724 else
8725 expand_return (TREE_OPERAND (exp, 0));
8726 return const0_rtx;
8728 case PREINCREMENT_EXPR:
8729 case PREDECREMENT_EXPR:
8730 return expand_increment (exp, 0, ignore);
8732 case POSTINCREMENT_EXPR:
8733 case POSTDECREMENT_EXPR:
8734 /* Faster to treat as pre-increment if result is not used. */
8735 return expand_increment (exp, ! ignore, ignore);
8737 case ADDR_EXPR:
8738 if (modifier == EXPAND_STACK_PARM)
8739 target = 0;
8740 /* Are we taking the address of a nested function? */
8741 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8742 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8743 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8744 && ! TREE_STATIC (exp))
8746 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8747 op0 = force_operand (op0, target);
8749 /* If we are taking the address of something erroneous, just
8750 return a zero. */
8751 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8752 return const0_rtx;
8753 /* If we are taking the address of a constant and are at the
8754 top level, we have to use output_constant_def since we can't
8755 call force_const_mem at top level. */
8756 else if (cfun == 0
8757 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8758 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8759 == 'c')))
8760 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8761 else
8763 /* We make sure to pass const0_rtx down if we came in with
8764 ignore set, to avoid doing the cleanups twice for something. */
8765 op0 = expand_expr (TREE_OPERAND (exp, 0),
8766 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8767 (modifier == EXPAND_INITIALIZER
8768 ? modifier : EXPAND_CONST_ADDRESS));
8770 /* If we are going to ignore the result, OP0 will have been set
8771 to const0_rtx, so just return it. Don't get confused and
8772 think we are taking the address of the constant. */
8773 if (ignore)
8774 return op0;
8776 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8777 clever and return a REG when given a MEM. */
8778 op0 = protect_from_queue (op0, 1);
8780 /* We would like the object in memory. If it is a constant, we can
8781 have it be statically allocated into memory. For a non-constant,
8782 we need to allocate some memory and store the value into it. */
8784 if (CONSTANT_P (op0))
8785 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8786 op0);
8787 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8788 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8789 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8791 /* If the operand is a SAVE_EXPR, we can deal with this by
8792 forcing the SAVE_EXPR into memory. */
8793 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8795 put_var_into_stack (TREE_OPERAND (exp, 0),
8796 /*rescan=*/true);
8797 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8799 else
8801 /* If this object is in a register, it can't be BLKmode. */
8802 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8803 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8805 if (GET_CODE (op0) == PARALLEL)
8806 /* Handle calls that pass values in multiple
8807 non-contiguous locations. The Irix 6 ABI has examples
8808 of this. */
8809 emit_group_store (memloc, op0, inner_type,
8810 int_size_in_bytes (inner_type));
8811 else
8812 emit_move_insn (memloc, op0);
8814 op0 = memloc;
8818 if (GET_CODE (op0) != MEM)
8819 abort ();
8821 mark_temp_addr_taken (op0);
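/* For EXPAND_SUM or EXPAND_INITIALIZER we may return the bare address,
   converting from Pmode to ptr_mode when the two differ.  */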
8822 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8824 op0 = XEXP (op0, 0);
8825 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8826 op0 = convert_memory_address (ptr_mode, op0);
8827 return op0;
8830 /* If OP0 is not aligned at least as much as the type requires, we
8831 need to make a temporary, copy OP0 to it, and take the address of
8832 the temporary. We want to use the alignment of the type, not of
8833 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8834 the test for BLKmode means that can't happen. The test for
8835 BLKmode is because we never make mis-aligned MEMs with
8836 non-BLKmode.
8838 We don't need to do this at all if the machine doesn't have
8839 strict alignment. */
8840 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8841 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8842 > MEM_ALIGN (op0))
8843 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8845 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8846 rtx new;
8848 if (TYPE_ALIGN_OK (inner_type))
8849 abort ();
8851 if (TREE_ADDRESSABLE (inner_type))
8853 /* We can't make a bitwise copy of this object, so fail. */
8854 error ("cannot take the address of an unaligned member");
8855 return const0_rtx;
8858 new = assign_stack_temp_for_type
8859 (TYPE_MODE (inner_type),
8860 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8861 : int_size_in_bytes (inner_type),
8862 1, build_qualified_type (inner_type,
8863 (TYPE_QUALS (inner_type)
8864 | TYPE_QUAL_CONST)));
8866 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8867 (modifier == EXPAND_STACK_PARM
8868 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8870 op0 = new;
8873 op0 = force_operand (XEXP (op0, 0), target);
8876 if (flag_force_addr
8877 && GET_CODE (op0) != REG
8878 && modifier != EXPAND_CONST_ADDRESS
8879 && modifier != EXPAND_INITIALIZER
8880 && modifier != EXPAND_SUM)
8881 op0 = force_reg (Pmode, op0);
8883 if (GET_CODE (op0) == REG
8884 && ! REG_USERVAR_P (op0))
8885 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8887 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8888 op0 = convert_memory_address (ptr_mode, op0);
8890 return op0;
8892 case ENTRY_VALUE_EXPR:
8893 abort ();
8895 /* COMPLEX type for Extended Pascal & Fortran */
8896 case COMPLEX_EXPR:
8898 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8899 rtx insns;
8901 /* Get the rtx for each operand. */
8902 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8903 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8905 if (! target)
8906 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8908 start_sequence ();
8910 /* Move the real (op0) and imaginary (op1) parts to their location. */
8911 emit_move_insn (gen_realpart (mode, target), op0);
8912 emit_move_insn (gen_imagpart (mode, target), op1);
8914 insns = get_insns ();
8915 end_sequence ();
8917 /* Complex construction should appear as a single unit. */
8918 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8919 each with a separate pseudo as destination.
8920 It's not correct for flow to treat them as a unit. */
8921 if (GET_CODE (target) != CONCAT)
8922 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8923 else
8924 emit_insn (insns);
8926 return target;
8929 case REALPART_EXPR:
8930 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8931 return gen_realpart (mode, op0);
8933 case IMAGPART_EXPR:
8934 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8935 return gen_imagpart (mode, op0);
8937 case CONJ_EXPR:
8939 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8940 rtx imag_t;
8941 rtx insns;
8943 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8945 if (! target)
8946 target = gen_reg_rtx (mode);
8948 start_sequence ();
8950 /* Store the realpart and the negated imagpart to target. */
8951 emit_move_insn (gen_realpart (partmode, target),
8952 gen_realpart (partmode, op0));
8954 imag_t = gen_imagpart (partmode, target);
8955 temp = expand_unop (partmode,
8956 ! unsignedp && flag_trapv
8957 && (GET_MODE_CLASS(partmode) == MODE_INT)
8958 ? negv_optab : neg_optab,
8959 gen_imagpart (partmode, op0), imag_t, 0);
8960 if (temp != imag_t)
8961 emit_move_insn (imag_t, temp);
8963 insns = get_insns ();
8964 end_sequence ();
8966         /* Conjugate should appear as a single unit.
8967 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8968 each with a separate pseudo as destination.
8969 It's not correct for flow to treat them as a unit. */
8970 if (GET_CODE (target) != CONCAT)
8971 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8972 else
8973 emit_insn (insns);
8975 return target;
8978 case TRY_CATCH_EXPR:
8980 tree handler = TREE_OPERAND (exp, 1);
8982 expand_eh_region_start ();
8984 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8986 expand_eh_region_end_cleanup (handler);
8988 return op0;
8991 case TRY_FINALLY_EXPR:
8993 tree try_block = TREE_OPERAND (exp, 0);
8994 tree finally_block = TREE_OPERAND (exp, 1);
8996 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8998 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8999 is not sufficient, so we cannot expand the block twice.
9000 So we play games with GOTO_SUBROUTINE_EXPR to let us
9001 expand the thing only once. */
9002 /* When not optimizing, we go ahead with this form since
9003 (1) user breakpoints operate more predictably without
9004 code duplication, and
9005 (2) we're not running any of the global optimizers
9006 that would explode in time/space with the highly
9007 connected CFG created by the indirect branching. */
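            /* Roughly, the code emitted below looks like

                   <try_block>
                   RETURN_LINK = &&resume;  goto FINALLY_LABEL;   (the cleanup)
                 resume:
                   goto DONE_LABEL;
                 FINALLY_LABEL:
                   <finally_block>
                   goto *RETURN_LINK;
                 DONE_LABEL:

               so FINALLY_BLOCK is expanded only once and is reached as a
               subroutine from each place that needs to run it.  */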
9009 rtx finally_label = gen_label_rtx ();
9010 rtx done_label = gen_label_rtx ();
9011 rtx return_link = gen_reg_rtx (Pmode);
9012 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9013 (tree) finally_label, (tree) return_link);
9014 TREE_SIDE_EFFECTS (cleanup) = 1;
9016 /* Start a new binding layer that will keep track of all cleanup
9017 actions to be performed. */
9018 expand_start_bindings (2);
9019 target_temp_slot_level = temp_slot_level;
9021 expand_decl_cleanup (NULL_TREE, cleanup);
9022 op0 = expand_expr (try_block, target, tmode, modifier);
9024 preserve_temp_slots (op0);
9025 expand_end_bindings (NULL_TREE, 0, 0);
9026 emit_jump (done_label);
9027 emit_label (finally_label);
9028 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9029 emit_indirect_jump (return_link);
9030 emit_label (done_label);
9032 else
9034 expand_start_bindings (2);
9035 target_temp_slot_level = temp_slot_level;
9037 expand_decl_cleanup (NULL_TREE, finally_block);
9038 op0 = expand_expr (try_block, target, tmode, modifier);
9040 preserve_temp_slots (op0);
9041 expand_end_bindings (NULL_TREE, 0, 0);
9044 return op0;
9047 case GOTO_SUBROUTINE_EXPR:
9049 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9050 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9051 rtx return_address = gen_label_rtx ();
9052 emit_move_insn (return_link,
9053 gen_rtx_LABEL_REF (Pmode, return_address));
9054 emit_jump (subr);
9055 emit_label (return_address);
9056 return const0_rtx;
9059 case VA_ARG_EXPR:
9060 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9062 case EXC_PTR_EXPR:
9063 return get_exception_pointer (cfun);
9065 case FDESC_EXPR:
9066     case FDESC_EXPR:
9067       /* Function descriptors are not valid except as
9068          initialization constants, and should not be expanded.  */
9068 abort ();
9070 default:
9071 /* ??? Use (*fun) form because expand_expr is a macro. */
9072 return (*lang_hooks.expand_expr) (exp, original_target, tmode,
9073 modifier, alt_rtl);
9076 /* Here to do an ordinary binary operator, generating an instruction
9077 from the optab already placed in `this_optab'. */
9078 binop:
9079 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9080 subtarget, &op0, &op1, 0);
9081 binop2:
9082 if (modifier == EXPAND_STACK_PARM)
9083 target = 0;
9084 temp = expand_binop (mode, this_optab, op0, op1, target,
9085 unsignedp, OPTAB_LIB_WIDEN);
9086 if (temp == 0)
9087 abort ();
9088 return temp;
9091 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
9092    when applied to the address of EXP, produces an address known to be
9093    aligned to more than BIGGEST_ALIGNMENT.  */
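/* The offset we look for has (modulo conversions) the form

       (- (ADDR_EXPR EXP)) & (ALIGN - 1)

   where ALIGN is a power of 2 larger than BIGGEST_ALIGNMENT; adding such an
   offset to the address of EXP rounds that address up to the next multiple
   of ALIGN.  */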
9095 static int
9096 is_aligning_offset (tree offset, tree exp)
9098 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9099 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9100 || TREE_CODE (offset) == NOP_EXPR
9101 || TREE_CODE (offset) == CONVERT_EXPR
9102 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9103 offset = TREE_OPERAND (offset, 0);
9105   /* We must now have a BIT_AND_EXPR with a constant that is one less than
9106      a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9107 if (TREE_CODE (offset) != BIT_AND_EXPR
9108 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9109 || compare_tree_int (TREE_OPERAND (offset, 1),
9110 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9111       || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9112 return 0;
9114 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9115 It must be NEGATE_EXPR. Then strip any more conversions. */
9116 offset = TREE_OPERAND (offset, 0);
9117 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9118 || TREE_CODE (offset) == NOP_EXPR
9119 || TREE_CODE (offset) == CONVERT_EXPR)
9120 offset = TREE_OPERAND (offset, 0);
9122 if (TREE_CODE (offset) != NEGATE_EXPR)
9123 return 0;
9125 offset = TREE_OPERAND (offset, 0);
9126 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9127 || TREE_CODE (offset) == NOP_EXPR
9128 || TREE_CODE (offset) == CONVERT_EXPR)
9129 offset = TREE_OPERAND (offset, 0);
9131 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9132 whose type is the same as EXP. */
9133 return (TREE_CODE (offset) == ADDR_EXPR
9134 && (TREE_OPERAND (offset, 0) == exp
9135 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9136 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9137 == TREE_TYPE (exp)))));
9140 /* Return the tree node if ARG corresponds to a string constant, or zero
9141 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9142 in bytes within the string that ARG is accessing. The type of the
9143 offset will be `sizetype'. */
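/* For example, for ARG of the form &"hello"[3], or the equivalent PLUS_EXPR
   of &"hello" and 3, this returns the STRING_CST "hello" and sets
   *PTR_OFFSET to the sizetype constant 3.  */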
9145 tree
9146 string_constant (tree arg, tree *ptr_offset)
9148 STRIP_NOPS (arg);
9150 if (TREE_CODE (arg) == ADDR_EXPR
9151 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9153 *ptr_offset = size_zero_node;
9154 return TREE_OPERAND (arg, 0);
9156 else if (TREE_CODE (arg) == PLUS_EXPR)
9158 tree arg0 = TREE_OPERAND (arg, 0);
9159 tree arg1 = TREE_OPERAND (arg, 1);
9161 STRIP_NOPS (arg0);
9162 STRIP_NOPS (arg1);
9164 if (TREE_CODE (arg0) == ADDR_EXPR
9165 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9167 *ptr_offset = convert (sizetype, arg1);
9168 return TREE_OPERAND (arg0, 0);
9170 else if (TREE_CODE (arg1) == ADDR_EXPR
9171 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9173 *ptr_offset = convert (sizetype, arg0);
9174 return TREE_OPERAND (arg1, 0);
9178 return 0;
9181 /* Expand code for a post- or pre- increment or decrement
9182 and return the RTX for the result.
9183 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
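/* For example, for `x++' (POST == 1) the value returned is the old value of
   X, while for `++x' (POST == 0) it is the incremented value.  */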
9185 static rtx
9186 expand_increment (tree exp, int post, int ignore)
9188 rtx op0, op1;
9189 rtx temp, value;
9190 tree incremented = TREE_OPERAND (exp, 0);
9191 optab this_optab = add_optab;
9192 int icode;
9193 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9194 int op0_is_copy = 0;
9195 int single_insn = 0;
9196 /* 1 means we can't store into OP0 directly,
9197 because it is a subreg narrower than a word,
9198 and we don't dare clobber the rest of the word. */
9199 int bad_subreg = 0;
9201 /* Stabilize any component ref that might need to be
9202 evaluated more than once below. */
9203 if (!post
9204 || TREE_CODE (incremented) == BIT_FIELD_REF
9205 || (TREE_CODE (incremented) == COMPONENT_REF
9206 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9207 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9208 incremented = stabilize_reference (incremented);
9209 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9210 ones into save exprs so that they don't accidentally get evaluated
9211 more than once by the code below. */
9212 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9213 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9214 incremented = save_expr (incremented);
9216 /* Compute the operands as RTX.
9217 Note whether OP0 is the actual lvalue or a copy of it:
9218 I believe it is a copy iff it is a register or subreg
9219 and insns were generated in computing it. */
9221 temp = get_last_insn ();
9222 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9224 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9225 in place but instead must do sign- or zero-extension during assignment,
9226 so we copy it into a new register and let the code below use it as
9227 a copy.
9229      Note that we can safely modify this SUBREG since it is known not to be
9230 shared (it was made by the expand_expr call above). */
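  /* For example, a `short' variable kept sign-extended in a full-word pseudo
     shows up here as something like (subreg:HI (reg:SI N)) with the promoted
     flag set; incrementing the narrow SUBREG in place could leave the wide
     register improperly extended, hence the handling below.  */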
9232 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9234 if (post)
9235 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9236 else
9237 bad_subreg = 1;
9239 else if (GET_CODE (op0) == SUBREG
9240 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9242 /* We cannot increment this SUBREG in place. If we are
9243 post-incrementing, get a copy of the old value. Otherwise,
9244 just mark that we cannot increment in place. */
9245 if (post)
9246 op0 = copy_to_reg (op0);
9247 else
9248 bad_subreg = 1;
9251 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9252 && temp != get_last_insn ());
9253 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9255 /* Decide whether incrementing or decrementing. */
9256 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9257 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9258 this_optab = sub_optab;
9260 /* Convert decrement by a constant into a negative increment. */
9261 if (this_optab == sub_optab
9262 && GET_CODE (op1) == CONST_INT)
9264 op1 = GEN_INT (-INTVAL (op1));
9265 this_optab = add_optab;
9268 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9269 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9271 /* For a preincrement, see if we can do this with a single instruction. */
9272 if (!post)
9274 icode = (int) this_optab->handlers[(int) mode].insn_code;
9275 if (icode != (int) CODE_FOR_nothing
9276 /* Make sure that OP0 is valid for operands 0 and 1
9277 of the insn we want to queue. */
9278 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9279 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9280 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9281 single_insn = 1;
9284 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9285 then we cannot just increment OP0. We must therefore contrive to
9286 increment the original value. Then, for postincrement, we can return
9287 OP0 since it is a copy of the old value. For preincrement, expand here
9288 unless we can do it with a single insn.
9290 Likewise if storing directly into OP0 would clobber high bits
9291 we need to preserve (bad_subreg). */
9292 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9294 /* This is the easiest way to increment the value wherever it is.
9295 Problems with multiple evaluation of INCREMENTED are prevented
9296 because either (1) it is a component_ref or preincrement,
9297 in which case it was stabilized above, or (2) it is an array_ref
9298 with constant index in an array in a register, which is
9299 safe to reevaluate. */
9300 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9301 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9302 ? MINUS_EXPR : PLUS_EXPR),
9303 TREE_TYPE (exp),
9304 incremented,
9305 TREE_OPERAND (exp, 1));
9307 while (TREE_CODE (incremented) == NOP_EXPR
9308 || TREE_CODE (incremented) == CONVERT_EXPR)
9310 newexp = convert (TREE_TYPE (incremented), newexp);
9311 incremented = TREE_OPERAND (incremented, 0);
9314 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9315 return post ? op0 : temp;
9318 if (post)
9320 /* We have a true reference to the value in OP0.
9321 If there is an insn to add or subtract in this mode, queue it.
9322 Queuing the increment insn avoids the register shuffling
9323 that often results if we must increment now and first save
9324 the old value for subsequent use. */
9326 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9327 op0 = stabilize (op0);
9328 #endif
9330 icode = (int) this_optab->handlers[(int) mode].insn_code;
9331 if (icode != (int) CODE_FOR_nothing
9332 /* Make sure that OP0 is valid for operands 0 and 1
9333 of the insn we want to queue. */
9334 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9335 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9337 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9338 op1 = force_reg (mode, op1);
9340 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9342 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9344 rtx addr = (general_operand (XEXP (op0, 0), mode)
9345 ? force_reg (Pmode, XEXP (op0, 0))
9346 : copy_to_reg (XEXP (op0, 0)));
9347 rtx temp, result;
9349 op0 = replace_equiv_address (op0, addr);
9350 temp = force_reg (GET_MODE (op0), op0);
9351 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9352 op1 = force_reg (mode, op1);
9354 /* The increment queue is LIFO, thus we have to `queue'
9355 the instructions in reverse order. */
9356 enqueue_insn (op0, gen_move_insn (op0, temp));
9357 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9358 return result;
9362 /* Preincrement, or we can't increment with one simple insn. */
9363 if (post)
9364 /* Save a copy of the value before inc or dec, to return it later. */
9365 temp = value = copy_to_reg (op0);
9366 else
9367 /* Arrange to return the incremented value. */
9368 /* Copy the rtx because expand_binop will protect from the queue,
9369 and the results of that would be invalid for us to return
9370 if our caller does emit_queue before using our result. */
9371 temp = copy_rtx (value = op0);
9373 /* Increment however we can. */
9374 op1 = expand_binop (mode, this_optab, value, op1, op0,
9375 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9377 /* Make sure the value is stored into OP0. */
9378 if (op1 != op0)
9379 emit_move_insn (op0, op1);
9381 return temp;
9384 /* Generate code to calculate EXP using a store-flag instruction
9385 and return an rtx for the result. EXP is either a comparison
9386 or a TRUTH_NOT_EXPR whose operand is a comparison.
9388 If TARGET is nonzero, store the result there if convenient.
9390 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9391 cheap.
9393 Return zero if there is no suitable set-flag instruction
9394 available on this machine.
9396 Once expand_expr has been called on the arguments of the comparison,
9397 we are committed to doing the store flag, since it is not safe to
9398 re-evaluate the expression. We emit the store-flag insn by calling
9399 emit_store_flag, but only expand the arguments if we have a reason
9400 to believe that emit_store_flag will be successful. If we think that
9401 it will, but it isn't, we have to simulate the store-flag with a
9402 set/jump/set sequence. */
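/* For example, to compute `a > b' when no usable scc instruction exists, the
   fallback at the end of this function emits roughly

       target = 1;  if (a > b) goto L;  target = 0;  L:

   with the two constants swapped when the result must be inverted.  */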
9404 static rtx
9405 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9407 enum rtx_code code;
9408 tree arg0, arg1, type;
9409 tree tem;
9410 enum machine_mode operand_mode;
9411 int invert = 0;
9412 int unsignedp;
9413 rtx op0, op1;
9414 enum insn_code icode;
9415 rtx subtarget = target;
9416 rtx result, label;
9418 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9419 result at the end. We can't simply invert the test since it would
9420 have already been inverted if it were valid. This case occurs for
9421 some floating-point comparisons. */
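  /* With IEEE floating point, for instance, `!(a < b)' is not equivalent to
     `a >= b' when either operand is a NaN, so the inversion has to be
     applied to the 0/1 result rather than to the comparison code.  */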
9423 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9424 invert = 1, exp = TREE_OPERAND (exp, 0);
9426 arg0 = TREE_OPERAND (exp, 0);
9427 arg1 = TREE_OPERAND (exp, 1);
9429 /* Don't crash if the comparison was erroneous. */
9430 if (arg0 == error_mark_node || arg1 == error_mark_node)
9431 return const0_rtx;
9433 type = TREE_TYPE (arg0);
9434 operand_mode = TYPE_MODE (type);
9435 unsignedp = TREE_UNSIGNED (type);
9437 /* We won't bother with BLKmode store-flag operations because it would mean
9438 passing a lot of information to emit_store_flag. */
9439 if (operand_mode == BLKmode)
9440 return 0;
9442 /* We won't bother with store-flag operations involving function pointers
9443 when function pointers must be canonicalized before comparisons. */
9444 #ifdef HAVE_canonicalize_funcptr_for_compare
9445 if (HAVE_canonicalize_funcptr_for_compare
9446 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9447 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9448 == FUNCTION_TYPE))
9449 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9450 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9451 == FUNCTION_TYPE))))
9452 return 0;
9453 #endif
9455 STRIP_NOPS (arg0);
9456 STRIP_NOPS (arg1);
9458 /* Get the rtx comparison code to use. We know that EXP is a comparison
9459 operation of some type. Some comparisons against 1 and -1 can be
9460 converted to comparisons with zero. Do so here so that the tests
9461 below will be aware that we have a comparison with zero. These
9462 tests will not catch constants in the first operand, but constants
9463 are rarely passed as the first operand. */
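  /* For example, `x < 1' becomes `x <= 0' and `x >= 1' becomes `x > 0';
     for signed operands, `x <= -1' becomes `x < 0' and `x > -1' becomes
     `x >= 0'.  */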
9465 switch (TREE_CODE (exp))
9467 case EQ_EXPR:
9468 code = EQ;
9469 break;
9470 case NE_EXPR:
9471 code = NE;
9472 break;
9473 case LT_EXPR:
9474 if (integer_onep (arg1))
9475 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9476 else
9477 code = unsignedp ? LTU : LT;
9478 break;
9479 case LE_EXPR:
9480 if (! unsignedp && integer_all_onesp (arg1))
9481 arg1 = integer_zero_node, code = LT;
9482 else
9483 code = unsignedp ? LEU : LE;
9484 break;
9485 case GT_EXPR:
9486 if (! unsignedp && integer_all_onesp (arg1))
9487 arg1 = integer_zero_node, code = GE;
9488 else
9489 code = unsignedp ? GTU : GT;
9490 break;
9491 case GE_EXPR:
9492 if (integer_onep (arg1))
9493 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9494 else
9495 code = unsignedp ? GEU : GE;
9496 break;
9498 case UNORDERED_EXPR:
9499 code = UNORDERED;
9500 break;
9501 case ORDERED_EXPR:
9502 code = ORDERED;
9503 break;
9504 case UNLT_EXPR:
9505 code = UNLT;
9506 break;
9507 case UNLE_EXPR:
9508 code = UNLE;
9509 break;
9510 case UNGT_EXPR:
9511 code = UNGT;
9512 break;
9513 case UNGE_EXPR:
9514 code = UNGE;
9515 break;
9516 case UNEQ_EXPR:
9517 code = UNEQ;
9518 break;
9520 default:
9521 abort ();
9524 /* Put a constant second. */
9525 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9527 tem = arg0; arg0 = arg1; arg1 = tem;
9528 code = swap_condition (code);
9531 /* If this is an equality or inequality test of a single bit, we can
9532 do this by shifting the bit being tested to the low-order bit and
9533 masking the result with the constant 1. If the condition was EQ,
9534 we xor it with 1. This does not require an scc insn and is faster
9535 than an scc insn even if we have it.
9537 The code to make this transformation was moved into fold_single_bit_test,
9538 so we just call into the folder and expand its result. */
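  /* For instance, `(x & 8) != 0' can be computed as `(x >> 3) & 1', and
     `(x & 8) == 0' as that value XORed with 1; fold_single_bit_test builds
     the corresponding tree, which is then expanded normally below.  */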
9540 if ((code == NE || code == EQ)
9541 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9542 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9544 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9545 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9546 arg0, arg1, type),
9547 target, VOIDmode, EXPAND_NORMAL);
9550 /* Now see if we are likely to be able to do this. Return if not. */
9551 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9552 return 0;
9554 icode = setcc_gen_code[(int) code];
9555 if (icode == CODE_FOR_nothing
9556 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9558 /* We can only do this if it is one of the special cases that
9559 can be handled without an scc insn. */
9560 if ((code == LT && integer_zerop (arg1))
9561 || (! only_cheap && code == GE && integer_zerop (arg1)))
9563 else if (BRANCH_COST >= 0
9564 && ! only_cheap && (code == NE || code == EQ)
9565 && TREE_CODE (type) != REAL_TYPE
9566 && ((abs_optab->handlers[(int) operand_mode].insn_code
9567 != CODE_FOR_nothing)
9568 || (ffs_optab->handlers[(int) operand_mode].insn_code
9569 != CODE_FOR_nothing)))
9571 else
9572 return 0;
9575 if (! get_subtarget (target)
9576 || GET_MODE (subtarget) != operand_mode)
9577 subtarget = 0;
9579 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9581 if (target == 0)
9582 target = gen_reg_rtx (mode);
9584 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9585      because, if emit_store_flag does anything, it will succeed and
9586 OP0 and OP1 will not be used subsequently. */
9588 result = emit_store_flag (target, code,
9589 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9590 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9591 operand_mode, unsignedp, 1);
9593 if (result)
9595 if (invert)
9596 result = expand_binop (mode, xor_optab, result, const1_rtx,
9597 result, 0, OPTAB_LIB_WIDEN);
9598 return result;
9601 /* If this failed, we have to do this with set/compare/jump/set code. */
9602 if (GET_CODE (target) != REG
9603 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9604 target = gen_reg_rtx (GET_MODE (target));
9606 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9607 result = compare_from_rtx (op0, op1, code, unsignedp,
9608 operand_mode, NULL_RTX);
9609 if (GET_CODE (result) == CONST_INT)
9610 return (((result == const0_rtx && ! invert)
9611 || (result != const0_rtx && invert))
9612 ? const0_rtx : const1_rtx);
9614 /* The code of RESULT may not match CODE if compare_from_rtx
9615 decided to swap its operands and reverse the original code.
9617 We know that compare_from_rtx returns either a CONST_INT or
9618 a new comparison code, so it is safe to just extract the
9619 code from RESULT. */
9620 code = GET_CODE (result);
9622 label = gen_label_rtx ();
9623 if (bcc_gen_fctn[(int) code] == 0)
9624 abort ();
9626 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9627 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9628 emit_label (label);
9630 return target;
9634 /* Stubs in case we haven't got a casesi insn. */
9635 #ifndef HAVE_casesi
9636 # define HAVE_casesi 0
9637 # define gen_casesi(a, b, c, d, e) (0)
9638 # define CODE_FOR_casesi CODE_FOR_nothing
9639 #endif
9641 /* If the machine does not have a case insn that compares the bounds,
9642 this means extra overhead for dispatch tables, which raises the
9643 threshold for using them. */
9644 #ifndef CASE_VALUES_THRESHOLD
9645 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9646 #endif /* CASE_VALUES_THRESHOLD */
9648 unsigned int
9649 case_values_threshold (void)
9651 return CASE_VALUES_THRESHOLD;
9654 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9655 0 otherwise (i.e. if there is no casesi instruction). */
9656 int
9657 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9658 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9660 enum machine_mode index_mode = SImode;
9661 int index_bits = GET_MODE_BITSIZE (index_mode);
9662 rtx op1, op2, index;
9663 enum machine_mode op_mode;
9665 if (! HAVE_casesi)
9666 return 0;
9668 /* Convert the index to SImode. */
9669 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9671 enum machine_mode omode = TYPE_MODE (index_type);
9672 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9674 /* We must handle the endpoints in the original mode. */
9675 index_expr = build (MINUS_EXPR, index_type,
9676 index_expr, minval);
9677 minval = integer_zero_node;
9678 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9679 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9680 omode, 1, default_label);
9681 /* Now we can safely truncate. */
9682 index = convert_to_mode (index_mode, index, 0);
9684 else
9686 if (TYPE_MODE (index_type) != index_mode)
9688 index_expr = convert (lang_hooks.types.type_for_size
9689 (index_bits, 0), index_expr);
9690 index_type = TREE_TYPE (index_expr);
9693 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9695 emit_queue ();
9696 index = protect_from_queue (index, 0);
9697 do_pending_stack_adjust ();
9699 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9700 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9701 (index, op_mode))
9702 index = copy_to_mode_reg (op_mode, index);
9704 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9706 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9707 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9708 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9709 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9710 (op1, op_mode))
9711 op1 = copy_to_mode_reg (op_mode, op1);
9713 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9715 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9716 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9717 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9718 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9719 (op2, op_mode))
9720 op2 = copy_to_mode_reg (op_mode, op2);
9722 emit_jump_insn (gen_casesi (index, op1, op2,
9723 table_label, default_label));
9724 return 1;
9727 /* Attempt to generate a tablejump instruction; same concept. */
9728 #ifndef HAVE_tablejump
9729 #define HAVE_tablejump 0
9730 #define gen_tablejump(x, y) (0)
9731 #endif
9733 /* Subroutine of the next function.
9735 INDEX is the value being switched on, with the lowest value
9736 in the table already subtracted.
9737 MODE is its expected mode (needed if INDEX is constant).
9738 RANGE is the length of the jump table.
9739 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9741 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9742 index value is out of range. */
9744 static void
9745 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9746 rtx default_label)
9748 rtx temp, vector;
9750 if (INTVAL (range) > cfun->max_jumptable_ents)
9751 cfun->max_jumptable_ents = INTVAL (range);
9753 /* Do an unsigned comparison (in the proper mode) between the index
9754 expression and the value which represents the length of the range.
9755 Since we just finished subtracting the lower bound of the range
9756 from the index expression, this comparison allows us to simultaneously
9757 check that the original index expression value is both greater than
9758 or equal to the minimum value of the range and less than or equal to
9759 the maximum value of the range. */
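     /* Because the comparison is unsigned, a single branch suffices: for
        instance, if RANGE is 5, indexes 0 .. 5 fall through to the table
        jump, while an index that went below the original minimum wraps to a
        huge unsigned value (e.g. -2 from a value two below the minimum) and
        branches to DEFAULT_LABEL just like one above the maximum.  */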
9761 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9762 default_label);
9764 /* If index is in range, it must fit in Pmode.
9765 Convert to Pmode so we can index with it. */
9766 if (mode != Pmode)
9767 index = convert_to_mode (Pmode, index, 1);
9769 /* Don't let a MEM slip through, because then INDEX that comes
9770 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9771 and break_out_memory_refs will go to work on it and mess it up. */
9772 #ifdef PIC_CASE_VECTOR_ADDRESS
9773 if (flag_pic && GET_CODE (index) != REG)
9774 index = copy_to_mode_reg (Pmode, index);
9775 #endif
9777 /* If flag_force_addr were to affect this address
9778 it could interfere with the tricky assumptions made
9779 about addresses that contain label-refs,
9780 which may be valid only very near the tablejump itself. */
9781 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9782 GET_MODE_SIZE, because this indicates how large insns are. The other
9783 uses should all be Pmode, because they are addresses. This code
9784 could fail if addresses and insns are not the same size. */
9785 index = gen_rtx_PLUS (Pmode,
9786 gen_rtx_MULT (Pmode, index,
9787 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9788 gen_rtx_LABEL_REF (Pmode, table_label));
9789 #ifdef PIC_CASE_VECTOR_ADDRESS
9790 if (flag_pic)
9791 index = PIC_CASE_VECTOR_ADDRESS (index);
9792 else
9793 #endif
9794 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9795 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9796 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9797 RTX_UNCHANGING_P (vector) = 1;
9798 MEM_NOTRAP_P (vector) = 1;
9799 convert_move (temp, vector, 0);
9801 emit_jump_insn (gen_tablejump (temp, table_label));
9803 /* If we are generating PIC code or if the table is PC-relative, the
9804 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9805 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9806 emit_barrier ();
9809 int
9810 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9811 rtx table_label, rtx default_label)
9813 rtx index;
9815 if (! HAVE_tablejump)
9816 return 0;
9818 index_expr = fold (build (MINUS_EXPR, index_type,
9819 convert (index_type, index_expr),
9820 convert (index_type, minval)));
9821 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9822 emit_queue ();
9823 index = protect_from_queue (index, 0);
9824 do_pending_stack_adjust ();
9826 do_tablejump (index, TYPE_MODE (index_type),
9827 convert_modes (TYPE_MODE (index_type),
9828 TYPE_MODE (TREE_TYPE (range)),
9829 expand_expr (range, NULL_RTX,
9830 VOIDmode, 0),
9831 TREE_UNSIGNED (TREE_TYPE (range))),
9832 table_label, default_label);
9833 return 1;
9836 /* Nonzero if the mode is a valid vector mode for this architecture.
9837 This returns nonzero even if there is no hardware support for the
9838 vector mode, but we can emulate with narrower modes. */
9840 int
9841 vector_mode_valid_p (enum machine_mode mode)
9843 enum mode_class class = GET_MODE_CLASS (mode);
9844 enum machine_mode innermode;
9846 /* Doh! What's going on? */
9847 if (class != MODE_VECTOR_INT
9848 && class != MODE_VECTOR_FLOAT)
9849 return 0;
9851 /* Hardware support. Woo hoo! */
9852 if (VECTOR_MODE_SUPPORTED_P (mode))
9853 return 1;
9855 innermode = GET_MODE_INNER (mode);
9857   /* We should probably return 1 if requesting V4DI when we have no DI
9858      but do have V2DI; however, this case is probably very unlikely.  */
9860 /* If we have support for the inner mode, we can safely emulate it.
9861      We may not have V2DI, but we can emulate with a pair of DIs.  */
9862 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9865 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9866 static rtx
9867 const_vector_from_tree (tree exp)
9869 rtvec v;
9870 int units, i;
9871 tree link, elt;
9872 enum machine_mode inner, mode;
9874 mode = TYPE_MODE (TREE_TYPE (exp));
9876 if (is_zeros_p (exp))
9877 return CONST0_RTX (mode);
9879 units = GET_MODE_NUNITS (mode);
9880 inner = GET_MODE_INNER (mode);
9882 v = rtvec_alloc (units);
9884 link = TREE_VECTOR_CST_ELTS (exp);
9885 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9887 elt = TREE_VALUE (link);
9889 if (TREE_CODE (elt) == REAL_CST)
9890 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9891 inner);
9892 else
9893 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9894 TREE_INT_CST_HIGH (elt),
9895 inner);
9898 /* Initialize remaining elements to 0. */
9899 for (; i < units; ++i)
9900 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9902 return gen_rtx_raw_CONST_VECTOR (mode, v);
9905 #include "gt-expr.h"