1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "target.h"
52 /* Decide whether a function's arguments should be processed
53 from first to last or from last to first.
55 They should if the stack and args grow in opposite directions, but
56 only if we have push insns. */
58 #ifdef PUSH_ROUNDING
60 #ifndef PUSH_ARGS_REVERSED
61 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #endif
64 #endif
66 #endif
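/* A worked example of the default above, for a hypothetical target: with
   STACK_GROWS_DOWNWARD defined and ARGS_GROW_DOWNWARD not defined, the two
   defined() tests differ, so PUSH_ARGS_REVERSED is defined and calls push
   their arguments last-to-first, which leaves them laid out first-to-last
   in the direction the arguments grow.  */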
68 #ifndef STACK_PUSH_CODE
69 #ifdef STACK_GROWS_DOWNWARD
70 #define STACK_PUSH_CODE PRE_DEC
71 #else
72 #define STACK_PUSH_CODE PRE_INC
73 #endif
74 #endif
76 /* Convert defined/undefined to boolean. */
77 #ifdef TARGET_MEM_FUNCTIONS
78 #undef TARGET_MEM_FUNCTIONS
79 #define TARGET_MEM_FUNCTIONS 1
80 #else
81 #define TARGET_MEM_FUNCTIONS 0
82 #endif
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
91 int cse_not_expected;
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
94 tree placeholder_list = 0;
96 /* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98 struct move_by_pieces
100 rtx to;
101 rtx to_addr;
102 int autinc_to;
103 int explicit_inc_to;
104 rtx from;
105 rtx from_addr;
106 int autinc_from;
107 int explicit_inc_from;
108 unsigned HOST_WIDE_INT len;
109 HOST_WIDE_INT offset;
110 int reverse;
113 /* This structure is used by store_by_pieces to describe the clear to
114 be performed. */
116 struct store_by_pieces
118 rtx to;
119 rtx to_addr;
120 int autinc_to;
121 int explicit_inc_to;
122 unsigned HOST_WIDE_INT len;
123 HOST_WIDE_INT offset;
124 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
125 void *constfundata;
126 int reverse;
129 static rtx enqueue_insn (rtx, rtx);
130 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
131 unsigned int);
132 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
133 struct move_by_pieces *);
134 static bool block_move_libcall_safe_for_call_parm (void);
135 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
136 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
137 static tree emit_block_move_libcall_fn (int);
138 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
139 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
140 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
141 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
142 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
143 struct store_by_pieces *);
144 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
145 static rtx clear_storage_via_libcall (rtx, rtx);
146 static tree clear_storage_libcall_fn (int);
147 static rtx compress_float_constant (rtx, rtx);
148 static rtx get_subtarget (rtx);
149 static int is_zeros_p (tree);
150 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
151 HOST_WIDE_INT, enum machine_mode,
152 tree, tree, int, int);
153 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
154 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
155 tree, enum machine_mode, int, tree, int);
156 static rtx var_rtx (tree);
158 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
159 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
161 static int is_aligning_offset (tree, tree);
162 static rtx expand_increment (tree, int, int);
163 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
164 enum expand_modifier);
165 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
166 #ifdef PUSH_ROUNDING
167 static void emit_single_push_insn (enum machine_mode, rtx, tree);
168 #endif
169 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
170 static rtx const_vector_from_tree (tree);
172 /* Record for each mode whether we can move a register directly to or
173 from an object of that mode in memory. If we can't, we won't try
174 to use that mode directly when accessing a field of that mode. */
176 static char direct_load[NUM_MACHINE_MODES];
177 static char direct_store[NUM_MACHINE_MODES];
179 /* Record for each mode whether we can float-extend from memory. */
181 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
183 /* This macro is used to determine whether move_by_pieces should be called
184 to perform a structure copy. */
185 #ifndef MOVE_BY_PIECES_P
186 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
188 #endif
190 /* This macro is used to determine whether clear_by_pieces should be
191 called to clear storage. */
192 #ifndef CLEAR_BY_PIECES_P
193 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
195 #endif
197 /* This macro is used to determine whether store_by_pieces should be
198 called to "memset" storage with byte values other than zero, or
199 to "memcpy" storage when the source is a constant string. */
200 #ifndef STORE_BY_PIECES_P
201 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
202 #endif
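/* A worked example of these heuristics, with hypothetical target numbers:
   on a 32-bit target with MOVE_MAX_PIECES == 4 and MOVE_RATIO == 3, a
   16-byte word-aligned copy costs move_by_pieces_ninsns (16, 32) == 4
   SImode moves, so MOVE_BY_PIECES_P is false and emit_block_move tries a
   movstr pattern or a libcall instead; an 8-byte copy costs only 2 moves
   and is expanded inline.  CLEAR_BY_PIECES_P applies the same insn count
   against CLEAR_RATIO, and STORE_BY_PIECES_P defaults to the
   MOVE_BY_PIECES_P test.  */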
204 /* This array records the insn_code of insns to perform block moves. */
205 enum insn_code movstr_optab[NUM_MACHINE_MODES];
207 /* This array records the insn_code of insns to perform block clears. */
208 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
210 /* These arrays record the insn_code of two different kinds of insns
211 to perform block compares. */
212 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
213 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
215 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
216 struct file_stack *expr_wfl_stack;
218 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
220 #ifndef SLOW_UNALIGNED_ACCESS
221 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
222 #endif
224 /* This is run once per compilation to set up which modes can be used
225 directly in memory and to initialize the block move optab. */
227 void
228 init_expr_once (void)
230 rtx insn, pat;
231 enum machine_mode mode;
232 int num_clobbers;
233 rtx mem, mem1;
234 rtx reg;
236 /* Try indexing by frame ptr and try by stack ptr.
237 It is known that on the Convex the stack ptr isn't a valid index.
238 With luck, one or the other is valid on any machine. */
239 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
240 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
242 /* A scratch register we can modify in-place below to avoid
243 useless RTL allocations. */
244 reg = gen_rtx_REG (VOIDmode, -1);
246 insn = rtx_alloc (INSN);
247 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
248 PATTERN (insn) = pat;
250 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
251 mode = (enum machine_mode) ((int) mode + 1))
253 int regno;
255 direct_load[(int) mode] = direct_store[(int) mode] = 0;
256 PUT_MODE (mem, mode);
257 PUT_MODE (mem1, mode);
258 PUT_MODE (reg, mode);
260 /* See if there is some register that can be used in this mode and
261 directly loaded or stored from memory. */
263 if (mode != VOIDmode && mode != BLKmode)
264 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
265 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
266 regno++)
268 if (! HARD_REGNO_MODE_OK (regno, mode))
269 continue;
271 REGNO (reg) = regno;
273 SET_SRC (pat) = mem;
274 SET_DEST (pat) = reg;
275 if (recog (pat, insn, &num_clobbers) >= 0)
276 direct_load[(int) mode] = 1;
278 SET_SRC (pat) = mem1;
279 SET_DEST (pat) = reg;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_load[(int) mode] = 1;
283 SET_SRC (pat) = reg;
284 SET_DEST (pat) = mem;
285 if (recog (pat, insn, &num_clobbers) >= 0)
286 direct_store[(int) mode] = 1;
288 SET_SRC (pat) = reg;
289 SET_DEST (pat) = mem1;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_store[(int) mode] = 1;
295 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
297 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
298 mode = GET_MODE_WIDER_MODE (mode))
300 enum machine_mode srcmode;
301 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
302 srcmode = GET_MODE_WIDER_MODE (srcmode))
304 enum insn_code ic;
306 ic = can_extend_p (mode, srcmode, 0);
307 if (ic == CODE_FOR_nothing)
308 continue;
310 PUT_MODE (mem, srcmode);
312 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
313 float_extend_from_mem[mode][srcmode] = true;
318 /* This is run at the start of compiling a function. */
320 void
321 init_expr (void)
323 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
326 /* Small sanity check that the queue is empty at the end of a function. */
328 void
329 finish_expr_for_function (void)
331 if (pending_chain)
332 abort ();
335 /* Manage the queue of increment instructions to be output
336 for POSTINCREMENT_EXPR expressions, etc. */
338 /* Queue up to increment (or change) VAR later. BODY says how:
339 BODY should be the same thing you would pass to emit_insn
340 to increment right away. It will go to emit_insn later on.
342 The value is a QUEUED expression to be used in place of VAR
343 where you want to guarantee the pre-incrementation value of VAR. */
345 static rtx
346 enqueue_insn (rtx var, rtx body)
348 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
349 body, pending_chain);
350 return pending_chain;
353 /* Use protect_from_queue to convert a QUEUED expression
354 into something that you can put immediately into an instruction.
355 If the queued incrementation has not happened yet,
356 protect_from_queue returns the variable itself.
357 If the incrementation has happened, protect_from_queue returns a temp
358 that contains a copy of the old value of the variable.
360 Any time an rtx which might possibly be a QUEUED is to be put
361 into an instruction, it must be passed through protect_from_queue first.
362 QUEUED expressions are not meaningful in instructions.
364 Do not pass a value through protect_from_queue and then hold
365 on to it for a while before putting it in an instruction!
366 If the queue is flushed in between, incorrect code will result. */
369 protect_from_queue (rtx x, int modify)
371 RTX_CODE code = GET_CODE (x);
373 #if 0 /* A QUEUED can hang around after the queue is forced out. */
374 /* Shortcut for most common case. */
375 if (pending_chain == 0)
376 return x;
377 #endif
379 if (code != QUEUED)
381 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
382 use of autoincrement. Make a copy of the contents of the memory
383 location rather than a copy of the address, but not if the value is
384 of mode BLKmode. Don't modify X in place since it might be
385 shared. */
386 if (code == MEM && GET_MODE (x) != BLKmode
387 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
389 rtx y = XEXP (x, 0);
390 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
392 if (QUEUED_INSN (y))
394 rtx temp = gen_reg_rtx (GET_MODE (x));
396 emit_insn_before (gen_move_insn (temp, new),
397 QUEUED_INSN (y));
398 return temp;
401 /* Copy the address into a pseudo, so that the returned value
402 remains correct across calls to emit_queue. */
403 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
406 /* Otherwise, recursively protect the subexpressions of all
407 the kinds of rtx's that can contain a QUEUED. */
408 if (code == MEM)
410 rtx tem = protect_from_queue (XEXP (x, 0), 0);
411 if (tem != XEXP (x, 0))
413 x = copy_rtx (x);
414 XEXP (x, 0) = tem;
417 else if (code == PLUS || code == MULT)
419 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
420 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
421 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
423 x = copy_rtx (x);
424 XEXP (x, 0) = new0;
425 XEXP (x, 1) = new1;
428 return x;
430 /* If the increment has not happened, use the variable itself. Copy it
431 into a new pseudo so that the value remains correct across calls to
432 emit_queue. */
433 if (QUEUED_INSN (x) == 0)
434 return copy_to_reg (QUEUED_VAR (x));
435 /* If the increment has happened and a pre-increment copy exists,
436 use that copy. */
437 if (QUEUED_COPY (x) != 0)
438 return QUEUED_COPY (x);
439 /* The increment has happened but we haven't set up a pre-increment copy.
440 Set one up now, and use it. */
441 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
442 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
443 QUEUED_INSN (x));
444 return QUEUED_COPY (x);
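/* A minimal usage sketch of the queue protocol described above; VAR_RTX and
   the surrounding expansion are illustrative only:

       q = enqueue_insn (var_rtx, gen_add2_insn (var_rtx, const1_rtx));
       x = protect_from_queue (q, 0);
       ... emit insns that read the pre-increment value through X ...
       emit_queue ();

   As the comment above warns, the protected value should go into an insn
   promptly rather than being held across a later queue flush.  */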
447 /* Return nonzero if X contains a QUEUED expression:
448 if it contains anything that will be altered by a queued increment.
449 We handle only combinations of MEM, PLUS, MINUS and MULT operators
450 since memory addresses generally contain only those. */
453 queued_subexp_p (rtx x)
455 enum rtx_code code = GET_CODE (x);
456 switch (code)
458 case QUEUED:
459 return 1;
460 case MEM:
461 return queued_subexp_p (XEXP (x, 0));
462 case MULT:
463 case PLUS:
464 case MINUS:
465 return (queued_subexp_p (XEXP (x, 0))
466 || queued_subexp_p (XEXP (x, 1)));
467 default:
468 return 0;
472 /* Perform all the pending incrementations. */
474 void
475 emit_queue (void)
477 rtx p;
478 while ((p = pending_chain))
480 rtx body = QUEUED_BODY (p);
482 switch (GET_CODE (body))
484 case INSN:
485 case JUMP_INSN:
486 case CALL_INSN:
487 case CODE_LABEL:
488 case BARRIER:
489 case NOTE:
490 QUEUED_INSN (p) = body;
491 emit_insn (body);
492 break;
494 #ifdef ENABLE_CHECKING
495 case SEQUENCE:
496 abort ();
497 break;
498 #endif
500 default:
501 QUEUED_INSN (p) = emit_insn (body);
502 break;
505 pending_chain = QUEUED_NEXT (p);
509 /* Copy data from FROM to TO, where the machine modes are not the same.
510 Both modes may be integer, or both may be floating.
511 UNSIGNEDP should be nonzero if FROM is an unsigned type.
512 This causes zero-extension instead of sign-extension. */
514 void
515 convert_move (rtx to, rtx from, int unsignedp)
517 enum machine_mode to_mode = GET_MODE (to);
518 enum machine_mode from_mode = GET_MODE (from);
519 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
520 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
521 enum insn_code code;
522 rtx libcall;
524 /* rtx code for making an equivalent value. */
525 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
526 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
528 to = protect_from_queue (to, 1);
529 from = protect_from_queue (from, 0);
531 if (to_real != from_real)
532 abort ();
534 /* If FROM is a SUBREG that indicates that we have already done at least
535 the required extension, strip it. We don't handle such SUBREGs as
536 TO here. */
538 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
539 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
540 >= GET_MODE_SIZE (to_mode))
541 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
542 from = gen_lowpart (to_mode, from), from_mode = to_mode;
544 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
545 abort ();
547 if (to_mode == from_mode
548 || (from_mode == VOIDmode && CONSTANT_P (from)))
550 emit_move_insn (to, from);
551 return;
554 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
556 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
557 abort ();
559 if (VECTOR_MODE_P (to_mode))
560 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
561 else
562 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
564 emit_move_insn (to, from);
565 return;
568 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
570 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
571 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
572 return;
575 if (to_real)
577 rtx value, insns;
578 convert_optab tab;
580 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
581 tab = sext_optab;
582 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
583 tab = trunc_optab;
584 else
585 abort ();
587 /* Try converting directly if the insn is supported. */
589 code = tab->handlers[to_mode][from_mode].insn_code;
590 if (code != CODE_FOR_nothing)
592 emit_unop_insn (code, to, from,
593 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
594 return;
597 /* Otherwise use a libcall. */
598 libcall = tab->handlers[to_mode][from_mode].libfunc;
600 if (!libcall)
601 /* This conversion is not implemented yet. */
602 abort ();
604 start_sequence ();
605 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
606 1, from, from_mode);
607 insns = get_insns ();
608 end_sequence ();
609 emit_libcall_block (insns, to, value,
610 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
611 from)
612 : gen_rtx_FLOAT_EXTEND (to_mode, from));
613 return;
616 /* Handle pointer conversion. */ /* SPEE 900220. */
617 /* Targets are expected to provide conversion insns between PxImode and
618 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
619 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
621 enum machine_mode full_mode
622 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
624 if (trunc_optab->handlers[to_mode][full_mode].insn_code
625 == CODE_FOR_nothing)
626 abort ();
628 if (full_mode != from_mode)
629 from = convert_to_mode (full_mode, from, unsignedp);
630 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
631 to, from, UNKNOWN);
632 return;
634 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
636 enum machine_mode full_mode
637 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
639 if (sext_optab->handlers[full_mode][from_mode].insn_code
640 == CODE_FOR_nothing)
641 abort ();
643 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
644 to, from, UNKNOWN);
645 if (to_mode == full_mode)
646 return;
648 /* else proceed to integer conversions below. */
649 from_mode = full_mode;
652 /* Now both modes are integers. */
654 /* Handle expanding beyond a word. */
655 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
656 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
658 rtx insns;
659 rtx lowpart;
660 rtx fill_value;
661 rtx lowfrom;
662 int i;
663 enum machine_mode lowpart_mode;
664 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
666 /* Try converting directly if the insn is supported. */
667 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
668 != CODE_FOR_nothing)
670 /* If FROM is a SUBREG, put it into a register. Do this
671 so that we always generate the same set of insns for
672 better cse'ing; if an intermediate assignment occurred,
673 we won't be doing the operation directly on the SUBREG. */
674 if (optimize > 0 && GET_CODE (from) == SUBREG)
675 from = force_reg (from_mode, from);
676 emit_unop_insn (code, to, from, equiv_code);
677 return;
679 /* Next, try converting via full word. */
680 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
681 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
682 != CODE_FOR_nothing))
684 if (GET_CODE (to) == REG)
686 if (reg_overlap_mentioned_p (to, from))
687 from = force_reg (from_mode, from);
688 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
690 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
691 emit_unop_insn (code, to,
692 gen_lowpart (word_mode, to), equiv_code);
693 return;
696 /* No special multiword conversion insn; do it by hand. */
697 start_sequence ();
699 /* Since we will turn this into a no conflict block, we must ensure
700 that the source does not overlap the target. */
702 if (reg_overlap_mentioned_p (to, from))
703 from = force_reg (from_mode, from);
705 /* Get a copy of FROM widened to a word, if necessary. */
706 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
707 lowpart_mode = word_mode;
708 else
709 lowpart_mode = from_mode;
711 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
713 lowpart = gen_lowpart (lowpart_mode, to);
714 emit_move_insn (lowpart, lowfrom);
716 /* Compute the value to put in each remaining word. */
717 if (unsignedp)
718 fill_value = const0_rtx;
719 else
721 #ifdef HAVE_slt
722 if (HAVE_slt
723 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
724 && STORE_FLAG_VALUE == -1)
726 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
727 lowpart_mode, 0);
728 fill_value = gen_reg_rtx (word_mode);
729 emit_insn (gen_slt (fill_value));
731 else
732 #endif
734 fill_value
735 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
736 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
737 NULL_RTX, 0);
738 fill_value = convert_to_mode (word_mode, fill_value, 1);
742 /* Fill the remaining words. */
743 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
745 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
746 rtx subword = operand_subword (to, index, 1, to_mode);
748 if (subword == 0)
749 abort ();
751 if (fill_value != subword)
752 emit_move_insn (subword, fill_value);
755 insns = get_insns ();
756 end_sequence ();
758 emit_no_conflict_block (insns, to, from, NULL_RTX,
759 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
760 return;
763 /* Truncating multi-word to a word or less. */
764 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
765 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
767 if (!((GET_CODE (from) == MEM
768 && ! MEM_VOLATILE_P (from)
769 && direct_load[(int) to_mode]
770 && ! mode_dependent_address_p (XEXP (from, 0)))
771 || GET_CODE (from) == REG
772 || GET_CODE (from) == SUBREG))
773 from = force_reg (from_mode, from);
774 convert_move (to, gen_lowpart (word_mode, from), 0);
775 return;
778 /* Now follow all the conversions between integers
779 no more than a word long. */
781 /* For truncation, usually we can just refer to FROM in a narrower mode. */
782 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
783 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
784 GET_MODE_BITSIZE (from_mode)))
786 if (!((GET_CODE (from) == MEM
787 && ! MEM_VOLATILE_P (from)
788 && direct_load[(int) to_mode]
789 && ! mode_dependent_address_p (XEXP (from, 0)))
790 || GET_CODE (from) == REG
791 || GET_CODE (from) == SUBREG))
792 from = force_reg (from_mode, from);
793 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
794 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
795 from = copy_to_reg (from);
796 emit_move_insn (to, gen_lowpart (to_mode, from));
797 return;
800 /* Handle extension. */
801 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
803 /* Convert directly if that works. */
804 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
805 != CODE_FOR_nothing)
807 if (flag_force_mem)
808 from = force_not_mem (from);
810 emit_unop_insn (code, to, from, equiv_code);
811 return;
813 else
815 enum machine_mode intermediate;
816 rtx tmp;
817 tree shift_amount;
819 /* Search for a mode to convert via. */
820 for (intermediate = from_mode; intermediate != VOIDmode;
821 intermediate = GET_MODE_WIDER_MODE (intermediate))
822 if (((can_extend_p (to_mode, intermediate, unsignedp)
823 != CODE_FOR_nothing)
824 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
825 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
826 GET_MODE_BITSIZE (intermediate))))
827 && (can_extend_p (intermediate, from_mode, unsignedp)
828 != CODE_FOR_nothing))
830 convert_move (to, convert_to_mode (intermediate, from,
831 unsignedp), unsignedp);
832 return;
835 /* No suitable intermediate mode.
836 Generate what we need with shifts. */
837 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
838 - GET_MODE_BITSIZE (from_mode), 0);
839 from = gen_lowpart (to_mode, force_reg (from_mode, from));
840 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
841 to, unsignedp);
842 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
843 to, unsignedp);
844 if (tmp != to)
845 emit_move_insn (to, tmp);
846 return;
850 /* Support special truncate insns for certain modes. */
851 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
853 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
854 to, from, UNKNOWN);
855 return;
858 /* Handle truncation of volatile memrefs, and so on;
859 the things that couldn't be truncated directly,
860 and for which there was no special instruction.
862 ??? Code above formerly short-circuited this, for most integer
863 mode pairs, with a force_reg in from_mode followed by a recursive
864 call to this routine. Appears always to have been wrong. */
865 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
867 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
868 emit_move_insn (to, temp);
869 return;
872 /* Mode combination is not recognized. */
873 abort ();
876 /* Return an rtx for a value that would result
877 from converting X to mode MODE.
878 Both X and MODE may be floating, or both integer.
879 UNSIGNEDP is nonzero if X is an unsigned value.
880 This can be done by referring to a part of X in place
881 or by copying to a new temporary with conversion.
883 This function *must not* call protect_from_queue
884 except when putting X into an insn (in which case convert_move does it). */
887 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
889 return convert_modes (mode, VOIDmode, x, unsignedp);
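/* A small illustrative call (the operands are hypothetical): given a QImode
   pseudo CHAR_REG,

       rtx wide = convert_to_mode (SImode, char_reg, 1);

   yields an SImode rtx holding the zero-extended value, either by returning
   a lowpart or converted constant directly in convert_modes or by
   allocating a new pseudo and letting convert_move emit the extension.  */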
892 /* Return an rtx for a value that would result
893 from converting X from mode OLDMODE to mode MODE.
894 Both modes may be floating, or both integer.
895 UNSIGNEDP is nonzero if X is an unsigned value.
897 This can be done by referring to a part of X in place
898 or by copying to a new temporary with conversion.
900 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
902 This function *must not* call protect_from_queue
903 except when putting X into an insn (in which case convert_move does it). */
906 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
908 rtx temp;
910 /* If FROM is a SUBREG that indicates that we have already done at least
911 the required extension, strip it. */
913 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
914 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
915 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
916 x = gen_lowpart (mode, x);
918 if (GET_MODE (x) != VOIDmode)
919 oldmode = GET_MODE (x);
921 if (mode == oldmode)
922 return x;
924 /* There is one case that we must handle specially: If we are converting
925 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
926 we are to interpret the constant as unsigned, gen_lowpart will do
927 the wrong thing if the constant appears negative. What we want to do is
928 make the high-order word of the constant zero, not all ones. */
930 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
931 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
932 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
934 HOST_WIDE_INT val = INTVAL (x);
936 if (oldmode != VOIDmode
937 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
939 int width = GET_MODE_BITSIZE (oldmode);
941 /* We need to zero extend VAL. */
942 val &= ((HOST_WIDE_INT) 1 << width) - 1;
945 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
948 /* We can do this with a gen_lowpart if both desired and current modes
949 are integer, and this is either a constant integer, a register, or a
950 non-volatile MEM. Except for the constant case where MODE is no
951 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
953 if ((GET_CODE (x) == CONST_INT
954 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
955 || (GET_MODE_CLASS (mode) == MODE_INT
956 && GET_MODE_CLASS (oldmode) == MODE_INT
957 && (GET_CODE (x) == CONST_DOUBLE
958 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
959 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
960 && direct_load[(int) mode])
961 || (GET_CODE (x) == REG
962 && (! HARD_REGISTER_P (x)
963 || HARD_REGNO_MODE_OK (REGNO (x), mode))
964 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
965 GET_MODE_BITSIZE (GET_MODE (x)))))))))
967 /* ?? If we don't know OLDMODE, we have to assume here that
968 X does not need sign- or zero-extension. This may not be
969 the case, but it's the best we can do. */
970 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
971 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
973 HOST_WIDE_INT val = INTVAL (x);
974 int width = GET_MODE_BITSIZE (oldmode);
976 /* We must sign or zero-extend in this case. Start by
977 zero-extending, then sign extend if we need to. */
978 val &= ((HOST_WIDE_INT) 1 << width) - 1;
979 if (! unsignedp
980 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
981 val |= (HOST_WIDE_INT) (-1) << width;
983 return gen_int_mode (val, mode);
986 return gen_lowpart (mode, x);
989 /* Converting an integer constant into a vector mode is always equivalent to a
990 subreg operation. */
991 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
993 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
994 abort ();
995 return simplify_gen_subreg (mode, x, oldmode, 0);
998 temp = gen_reg_rtx (mode);
999 convert_move (temp, x, unsignedp);
1000 return temp;
1003 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1004 store efficiently. Due to internal GCC limitations, this is
1005 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1006 for an immediate constant. */
1008 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1010 /* Determine whether the LEN bytes can be moved by using several move
1011 instructions. Return nonzero if a call to move_by_pieces should
1012 succeed. */
1015 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1016 unsigned int align ATTRIBUTE_UNUSED)
1018 return MOVE_BY_PIECES_P (len, align);
1021 /* Generate several move instructions to copy LEN bytes from block FROM to
1022 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1023 and TO through protect_from_queue before calling.
1025 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1026 used to push FROM to the stack.
1028 ALIGN is maximum stack alignment we can assume.
1030 If ENDP is 0 return TO, if ENDP is 1 return the memory at the end ala
1031 mempcpy, and if ENDP is 2 return the memory at the end minus one byte ala
1032 stpcpy. */
1035 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1036 unsigned int align, int endp)
1038 struct move_by_pieces data;
1039 rtx to_addr, from_addr = XEXP (from, 0);
1040 unsigned int max_size = MOVE_MAX_PIECES + 1;
1041 enum machine_mode mode = VOIDmode, tmode;
1042 enum insn_code icode;
1044 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1046 data.offset = 0;
1047 data.from_addr = from_addr;
1048 if (to)
1050 to_addr = XEXP (to, 0);
1051 data.to = to;
1052 data.autinc_to
1053 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1054 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1055 data.reverse
1056 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1058 else
1060 to_addr = NULL_RTX;
1061 data.to = NULL_RTX;
1062 data.autinc_to = 1;
1063 #ifdef STACK_GROWS_DOWNWARD
1064 data.reverse = 1;
1065 #else
1066 data.reverse = 0;
1067 #endif
1069 data.to_addr = to_addr;
1070 data.from = from;
1071 data.autinc_from
1072 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1073 || GET_CODE (from_addr) == POST_INC
1074 || GET_CODE (from_addr) == POST_DEC);
1076 data.explicit_inc_from = 0;
1077 data.explicit_inc_to = 0;
1078 if (data.reverse) data.offset = len;
1079 data.len = len;
1081 /* If copying requires more than two move insns,
1082 copy addresses to registers (to make displacements shorter)
1083 and use post-increment if available. */
1084 if (!(data.autinc_from && data.autinc_to)
1085 && move_by_pieces_ninsns (len, align) > 2)
1087 /* Find the mode of the largest move... */
1088 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1089 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1090 if (GET_MODE_SIZE (tmode) < max_size)
1091 mode = tmode;
1093 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1095 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1096 data.autinc_from = 1;
1097 data.explicit_inc_from = -1;
1099 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1101 data.from_addr = copy_addr_to_reg (from_addr);
1102 data.autinc_from = 1;
1103 data.explicit_inc_from = 1;
1105 if (!data.autinc_from && CONSTANT_P (from_addr))
1106 data.from_addr = copy_addr_to_reg (from_addr);
1107 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1109 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1110 data.autinc_to = 1;
1111 data.explicit_inc_to = -1;
1113 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1115 data.to_addr = copy_addr_to_reg (to_addr);
1116 data.autinc_to = 1;
1117 data.explicit_inc_to = 1;
1119 if (!data.autinc_to && CONSTANT_P (to_addr))
1120 data.to_addr = copy_addr_to_reg (to_addr);
1123 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1124 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1125 align = MOVE_MAX * BITS_PER_UNIT;
1127 /* First move what we can in the largest integer mode, then go to
1128 successively smaller modes. */
1130 while (max_size > 1)
1132 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1133 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1134 if (GET_MODE_SIZE (tmode) < max_size)
1135 mode = tmode;
1137 if (mode == VOIDmode)
1138 break;
1140 icode = mov_optab->handlers[(int) mode].insn_code;
1141 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1142 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1144 max_size = GET_MODE_SIZE (mode);
1147 /* The code above should have handled everything. */
1148 if (data.len > 0)
1149 abort ();
1151 if (endp)
1153 rtx to1;
1155 if (data.reverse)
1156 abort ();
1157 if (data.autinc_to)
1159 if (endp == 2)
1161 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1162 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1163 else
1164 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1165 -1));
1167 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1168 data.offset);
1170 else
1172 if (endp == 2)
1173 --data.offset;
1174 to1 = adjust_address (data.to, QImode, data.offset);
1176 return to1;
1178 else
1179 return data.to;
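/* An illustrative call, with hypothetical operands: expanding something like
   __builtin_mempcpy with a compile-time length can use

       to1 = move_by_pieces (dst_mem, src_mem, len, align, 1);

   where ENDP == 1 makes the returned TO1 a QImode MEM just past the last
   byte stored (ala mempcpy), ENDP == 2 returns the MEM one byte before that
   (ala stpcpy), and ENDP == 0 simply returns TO.  */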
1182 /* Return number of insns required to move L bytes by pieces.
1183 ALIGN (in bits) is maximum alignment we can assume. */
1185 static unsigned HOST_WIDE_INT
1186 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1188 unsigned HOST_WIDE_INT n_insns = 0;
1189 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1191 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1192 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1193 align = MOVE_MAX * BITS_PER_UNIT;
1195 while (max_size > 1)
1197 enum machine_mode mode = VOIDmode, tmode;
1198 enum insn_code icode;
1200 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1201 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1202 if (GET_MODE_SIZE (tmode) < max_size)
1203 mode = tmode;
1205 if (mode == VOIDmode)
1206 break;
1208 icode = mov_optab->handlers[(int) mode].insn_code;
1209 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1210 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1212 max_size = GET_MODE_SIZE (mode);
1215 if (l)
1216 abort ();
1217 return n_insns;
1220 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1221 with move instructions for mode MODE. GENFUN is the gen_... function
1222 to make a move insn for that mode. DATA has all the other info. */
1224 static void
1225 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1226 struct move_by_pieces *data)
1228 unsigned int size = GET_MODE_SIZE (mode);
1229 rtx to1 = NULL_RTX, from1;
1231 while (data->len >= size)
1233 if (data->reverse)
1234 data->offset -= size;
1236 if (data->to)
1238 if (data->autinc_to)
1239 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1240 data->offset);
1241 else
1242 to1 = adjust_address (data->to, mode, data->offset);
1245 if (data->autinc_from)
1246 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1247 data->offset);
1248 else
1249 from1 = adjust_address (data->from, mode, data->offset);
1251 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1252 emit_insn (gen_add2_insn (data->to_addr,
1253 GEN_INT (-(HOST_WIDE_INT)size)));
1254 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1255 emit_insn (gen_add2_insn (data->from_addr,
1256 GEN_INT (-(HOST_WIDE_INT)size)));
1258 if (data->to)
1259 emit_insn ((*genfun) (to1, from1));
1260 else
1262 #ifdef PUSH_ROUNDING
1263 emit_single_push_insn (mode, from1, NULL);
1264 #else
1265 abort ();
1266 #endif
1269 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1270 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1271 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1272 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1274 if (! data->reverse)
1275 data->offset += size;
1277 data->len -= size;
1281 /* Emit code to move a block Y to a block X. This may be done with
1282 string-move instructions, with multiple scalar move instructions,
1283 or with a library call.
1285 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1286 SIZE is an rtx that says how long they are.
1287 ALIGN is the maximum alignment we can assume they have.
1288 METHOD describes what kind of copy this is, and what mechanisms may be used.
1290 Return the address of the new block, if memcpy is called and returns it,
1291 0 otherwise. */
1294 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1296 bool may_use_call;
1297 rtx retval = 0;
1298 unsigned int align;
1300 switch (method)
1302 case BLOCK_OP_NORMAL:
1303 may_use_call = true;
1304 break;
1306 case BLOCK_OP_CALL_PARM:
1307 may_use_call = block_move_libcall_safe_for_call_parm ();
1309 /* Make inhibit_defer_pop nonzero around the library call
1310 to force it to pop the arguments right away. */
1311 NO_DEFER_POP;
1312 break;
1314 case BLOCK_OP_NO_LIBCALL:
1315 may_use_call = false;
1316 break;
1318 default:
1319 abort ();
1322 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1324 if (GET_MODE (x) != BLKmode)
1325 abort ();
1326 if (GET_MODE (y) != BLKmode)
1327 abort ();
1329 x = protect_from_queue (x, 1);
1330 y = protect_from_queue (y, 0);
1331 size = protect_from_queue (size, 0);
1333 if (GET_CODE (x) != MEM)
1334 abort ();
1335 if (GET_CODE (y) != MEM)
1336 abort ();
1337 if (size == 0)
1338 abort ();
1340 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1341 can be incorrect is coming from __builtin_memcpy. */
1342 if (GET_CODE (size) == CONST_INT)
1344 if (INTVAL (size) == 0)
1345 return 0;
1347 x = shallow_copy_rtx (x);
1348 y = shallow_copy_rtx (y);
1349 set_mem_size (x, size);
1350 set_mem_size (y, size);
1353 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1354 move_by_pieces (x, y, INTVAL (size), align, 0);
1355 else if (emit_block_move_via_movstr (x, y, size, align))
1357 else if (may_use_call)
1358 retval = emit_block_move_via_libcall (x, y, size);
1359 else
1360 emit_block_move_via_loop (x, y, size, align);
1362 if (method == BLOCK_OP_CALL_PARM)
1363 OK_DEFER_POP;
1365 return retval;
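/* A typical use, with hypothetical operands: for a structure copy whose size
   is known at expansion time,

       emit_block_move (dst_mem, src_mem, GEN_INT (size), BLOCK_OP_NORMAL);

   lets the code above choose between move_by_pieces, a movstr pattern and
   the memcpy/bcopy libcall, while BLOCK_OP_NO_LIBCALL limits the choice to
   the inline strategies plus the explicit byte-copy loop.  */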
1368 /* A subroutine of emit_block_move. Returns true if calling the
1369 block move libcall will not clobber any parameters which may have
1370 already been placed on the stack. */
1372 static bool
1373 block_move_libcall_safe_for_call_parm (void)
1375 /* If arguments are pushed on the stack, then they're safe. */
1376 if (PUSH_ARGS)
1377 return true;
1379 /* If registers go on the stack anyway, any argument is sure to clobber
1380 an outgoing argument. */
1381 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1383 tree fn = emit_block_move_libcall_fn (false);
1384 (void) fn;
1385 if (REG_PARM_STACK_SPACE (fn) != 0)
1386 return false;
1388 #endif
1390 /* If any argument goes in memory, then it might clobber an outgoing
1391 argument. */
1393 CUMULATIVE_ARGS args_so_far;
1394 tree fn, arg;
1396 fn = emit_block_move_libcall_fn (false);
1397 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1399 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1400 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1402 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1403 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1404 if (!tmp || !REG_P (tmp))
1405 return false;
1406 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1407 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1408 NULL_TREE, 1))
1409 return false;
1410 #endif
1411 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1414 return true;
1417 /* A subroutine of emit_block_move. Expand a movstr pattern;
1418 return true if successful. */
1420 static bool
1421 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1423 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1424 int save_volatile_ok = volatile_ok;
1425 enum machine_mode mode;
1427 /* Since this is a move insn, we don't care about volatility. */
1428 volatile_ok = 1;
1430 /* Try the most limited insn first, because there's no point
1431 including more than one in the machine description unless
1432 the more limited one has some advantage. */
1434 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1435 mode = GET_MODE_WIDER_MODE (mode))
1437 enum insn_code code = movstr_optab[(int) mode];
1438 insn_operand_predicate_fn pred;
1440 if (code != CODE_FOR_nothing
1441 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1442 here because if SIZE is less than the mode mask, as it is
1443 returned by the macro, it will definitely be less than the
1444 actual mode mask. */
1445 && ((GET_CODE (size) == CONST_INT
1446 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1447 <= (GET_MODE_MASK (mode) >> 1)))
1448 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1449 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1450 || (*pred) (x, BLKmode))
1451 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1452 || (*pred) (y, BLKmode))
1453 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1454 || (*pred) (opalign, VOIDmode)))
1456 rtx op2;
1457 rtx last = get_last_insn ();
1458 rtx pat;
1460 op2 = convert_to_mode (mode, size, 1);
1461 pred = insn_data[(int) code].operand[2].predicate;
1462 if (pred != 0 && ! (*pred) (op2, mode))
1463 op2 = copy_to_mode_reg (mode, op2);
1465 /* ??? When emit_block_move is called for BLOCK_OP_CALL_PARM, it'd be
1466 nice if there were some way to inform the backend, so
1467 that it doesn't fail the expansion because it thinks
1468 emitting the libcall would be more efficient. */
1470 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1471 if (pat)
1473 emit_insn (pat);
1474 volatile_ok = save_volatile_ok;
1475 return true;
1477 else
1478 delete_insns_since (last);
1482 volatile_ok = save_volatile_ok;
1483 return false;
1486 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1487 Return the return value from memcpy, 0 otherwise. */
1489 static rtx
1490 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1492 rtx dst_addr, src_addr;
1493 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1494 enum machine_mode size_mode;
1495 rtx retval;
1497 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1499 It is unsafe to save the value generated by protect_from_queue and reuse
1500 it later. Consider what happens if emit_queue is called before the
1501 return value from protect_from_queue is used.
1503 Expansion of the CALL_EXPR below will call emit_queue before we are
1504 finished emitting RTL for argument setup. So if we are not careful we
1505 could get the wrong value for an argument.
1507 To avoid this problem we go ahead and emit code to copy the addresses of
1508 DST and SRC and SIZE into new pseudos. We can then place those new
1509 pseudos into an RTL_EXPR and use them later, even after a call to
1510 emit_queue.
1512 Note this is not strictly needed for library calls since they do not call
1513 emit_queue before loading their arguments. However, we may need to have
1514 library calls call emit_queue in the future since failing to do so could
1515 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1516 arguments in registers. */
1518 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1519 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1521 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1522 src_addr = convert_memory_address (ptr_mode, src_addr);
1524 dst_tree = make_tree (ptr_type_node, dst_addr);
1525 src_tree = make_tree (ptr_type_node, src_addr);
1527 if (TARGET_MEM_FUNCTIONS)
1528 size_mode = TYPE_MODE (sizetype);
1529 else
1530 size_mode = TYPE_MODE (unsigned_type_node);
1532 size = convert_to_mode (size_mode, size, 1);
1533 size = copy_to_mode_reg (size_mode, size);
1535 /* It is incorrect to use the libcall calling conventions to call
1536 memcpy in this context. This could be a user call to memcpy and
1537 the user may wish to examine the return value from memcpy. For
1538 targets where libcalls and normal calls have different conventions
1539 for returning pointers, we could end up generating incorrect code.
1541 For convenience, we generate the call to bcopy this way as well. */
1543 if (TARGET_MEM_FUNCTIONS)
1544 size_tree = make_tree (sizetype, size);
1545 else
1546 size_tree = make_tree (unsigned_type_node, size);
1548 fn = emit_block_move_libcall_fn (true);
1549 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1550 if (TARGET_MEM_FUNCTIONS)
1552 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1553 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1555 else
1557 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1558 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1561 /* Now we have to build up the CALL_EXPR itself. */
1562 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1563 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1564 call_expr, arg_list, NULL_TREE);
1566 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1568 /* If we are initializing a readonly value, show the above call clobbered
1569 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1570 the delay slot scheduler might overlook conflicts and take nasty
1571 decisions. */
1572 if (RTX_UNCHANGING_P (dst))
1573 add_function_usage_to
1574 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1575 gen_rtx_CLOBBER (VOIDmode, dst),
1576 NULL_RTX));
1578 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1581 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1582 for the function we use for block copies. The first time FOR_CALL
1583 is true, we call assemble_external. */
1585 static GTY(()) tree block_move_fn;
1587 void
1588 init_block_move_fn (const char *asmspec)
1590 if (!block_move_fn)
1592 tree args, fn;
1594 if (TARGET_MEM_FUNCTIONS)
1596 fn = get_identifier ("memcpy");
1597 args = build_function_type_list (ptr_type_node, ptr_type_node,
1598 const_ptr_type_node, sizetype,
1599 NULL_TREE);
1601 else
1603 fn = get_identifier ("bcopy");
1604 args = build_function_type_list (void_type_node, const_ptr_type_node,
1605 ptr_type_node, unsigned_type_node,
1606 NULL_TREE);
1609 fn = build_decl (FUNCTION_DECL, fn, args);
1610 DECL_EXTERNAL (fn) = 1;
1611 TREE_PUBLIC (fn) = 1;
1612 DECL_ARTIFICIAL (fn) = 1;
1613 TREE_NOTHROW (fn) = 1;
1615 block_move_fn = fn;
1618 if (asmspec)
1620 SET_DECL_RTL (block_move_fn, NULL_RTX);
1621 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1625 static tree
1626 emit_block_move_libcall_fn (int for_call)
1628 static bool emitted_extern;
1630 if (!block_move_fn)
1631 init_block_move_fn (NULL);
1633 if (for_call && !emitted_extern)
1635 emitted_extern = true;
1636 make_decl_rtl (block_move_fn, NULL);
1637 assemble_external (block_move_fn);
1640 return block_move_fn;
1643 /* A subroutine of emit_block_move. Copy the data via an explicit
1644 loop. This is used only when libcalls are forbidden. */
1645 /* ??? It'd be nice to copy in hunks larger than QImode. */
1647 static void
1648 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1649 unsigned int align ATTRIBUTE_UNUSED)
1651 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1652 enum machine_mode iter_mode;
1654 iter_mode = GET_MODE (size);
1655 if (iter_mode == VOIDmode)
1656 iter_mode = word_mode;
1658 top_label = gen_label_rtx ();
1659 cmp_label = gen_label_rtx ();
1660 iter = gen_reg_rtx (iter_mode);
1662 emit_move_insn (iter, const0_rtx);
1664 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1665 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1666 do_pending_stack_adjust ();
1668 emit_note (NOTE_INSN_LOOP_BEG);
1670 emit_jump (cmp_label);
1671 emit_label (top_label);
1673 tmp = convert_modes (Pmode, iter_mode, iter, true);
1674 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1675 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1676 x = change_address (x, QImode, x_addr);
1677 y = change_address (y, QImode, y_addr);
1679 emit_move_insn (x, y);
1681 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1682 true, OPTAB_LIB_WIDEN);
1683 if (tmp != iter)
1684 emit_move_insn (iter, tmp);
1686 emit_note (NOTE_INSN_LOOP_CONT);
1687 emit_label (cmp_label);
1689 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1690 true, top_label);
1692 emit_note (NOTE_INSN_LOOP_END);
1695 /* Copy all or part of a value X into registers starting at REGNO.
1696 The number of registers to be filled is NREGS. */
1698 void
1699 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1701 int i;
1702 #ifdef HAVE_load_multiple
1703 rtx pat;
1704 rtx last;
1705 #endif
1707 if (nregs == 0)
1708 return;
1710 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1711 x = validize_mem (force_const_mem (mode, x));
1713 /* See if the machine can do this with a load multiple insn. */
1714 #ifdef HAVE_load_multiple
1715 if (HAVE_load_multiple)
1717 last = get_last_insn ();
1718 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1719 GEN_INT (nregs));
1720 if (pat)
1722 emit_insn (pat);
1723 return;
1725 else
1726 delete_insns_since (last);
1728 #endif
1730 for (i = 0; i < nregs; i++)
1731 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1732 operand_subword_force (x, i, mode));
1735 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1736 The number of registers to be filled is NREGS. */
1738 void
1739 move_block_from_reg (int regno, rtx x, int nregs)
1741 int i;
1743 if (nregs == 0)
1744 return;
1746 /* See if the machine can do this with a store multiple insn. */
1747 #ifdef HAVE_store_multiple
1748 if (HAVE_store_multiple)
1750 rtx last = get_last_insn ();
1751 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1752 GEN_INT (nregs));
1753 if (pat)
1755 emit_insn (pat);
1756 return;
1758 else
1759 delete_insns_since (last);
1761 #endif
1763 for (i = 0; i < nregs; i++)
1765 rtx tem = operand_subword (x, i, 1, BLKmode);
1767 if (tem == 0)
1768 abort ();
1770 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1774 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1775 ORIG, where ORIG is a non-consecutive group of registers represented by
1776 a PARALLEL. The clone is identical to the original except in that the
1777 original set of registers is replaced by a new set of pseudo registers.
1778 The new set has the same modes as the original set. */
1781 gen_group_rtx (rtx orig)
1783 int i, length;
1784 rtx *tmps;
1786 if (GET_CODE (orig) != PARALLEL)
1787 abort ();
1789 length = XVECLEN (orig, 0);
1790 tmps = alloca (sizeof (rtx) * length);
1792 /* Skip a NULL entry in first slot. */
1793 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1795 if (i)
1796 tmps[0] = 0;
1798 for (; i < length; i++)
1800 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1801 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1803 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1806 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1809 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1810 where DST is non-consecutive registers represented by a PARALLEL.
1811 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1812 if not known. */
1814 void
1815 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1817 rtx *tmps, src;
1818 int start, i;
1820 if (GET_CODE (dst) != PARALLEL)
1821 abort ();
1823 /* Check for a NULL entry, used to indicate that the parameter goes
1824 both on the stack and in registers. */
1825 if (XEXP (XVECEXP (dst, 0, 0), 0))
1826 start = 0;
1827 else
1828 start = 1;
1830 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1832 /* Process the pieces. */
1833 for (i = start; i < XVECLEN (dst, 0); i++)
1835 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1836 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1837 unsigned int bytelen = GET_MODE_SIZE (mode);
1838 int shift = 0;
1840 /* Handle trailing fragments that run over the size of the struct. */
1841 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1843 /* Arrange to shift the fragment to where it belongs.
1844 extract_bit_field loads to the lsb of the reg. */
1845 if (
1846 #ifdef BLOCK_REG_PADDING
1847 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1848 == (BYTES_BIG_ENDIAN ? upward : downward)
1849 #else
1850 BYTES_BIG_ENDIAN
1851 #endif
1853 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1854 bytelen = ssize - bytepos;
1855 if (bytelen <= 0)
1856 abort ();
1859 /* If we won't be loading directly from memory, protect the real source
1860 from strange tricks we might play; but make sure that the source can
1861 be loaded directly into the destination. */
1862 src = orig_src;
1863 if (GET_CODE (orig_src) != MEM
1864 && (!CONSTANT_P (orig_src)
1865 || (GET_MODE (orig_src) != mode
1866 && GET_MODE (orig_src) != VOIDmode)))
1868 if (GET_MODE (orig_src) == VOIDmode)
1869 src = gen_reg_rtx (mode);
1870 else
1871 src = gen_reg_rtx (GET_MODE (orig_src));
1873 emit_move_insn (src, orig_src);
1876 /* Optimize the access just a bit. */
1877 if (GET_CODE (src) == MEM
1878 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1879 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1880 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1881 && bytelen == GET_MODE_SIZE (mode))
1883 tmps[i] = gen_reg_rtx (mode);
1884 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1886 else if (GET_CODE (src) == CONCAT)
1888 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1889 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1891 if ((bytepos == 0 && bytelen == slen0)
1892 || (bytepos != 0 && bytepos + bytelen <= slen))
1894 /* The following assumes that the concatenated objects all
1895 have the same size. In this case, a simple calculation
1896 can be used to determine the object and the bit field
1897 to be extracted. */
1898 tmps[i] = XEXP (src, bytepos / slen0);
1899 if (! CONSTANT_P (tmps[i])
1900 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1901 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1902 (bytepos % slen0) * BITS_PER_UNIT,
1903 1, NULL_RTX, mode, mode, ssize);
1905 else if (bytepos == 0)
1907 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1908 emit_move_insn (mem, src);
1909 tmps[i] = adjust_address (mem, mode, 0);
1911 else
1912 abort ();
1914 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1915 SIMD register, which is currently broken. While we get GCC
1916 to emit proper RTL for these cases, let's dump to memory. */
1917 else if (VECTOR_MODE_P (GET_MODE (dst))
1918 && GET_CODE (src) == REG)
1920 int slen = GET_MODE_SIZE (GET_MODE (src));
1921 rtx mem;
1923 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1924 emit_move_insn (mem, src);
1925 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1927 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1928 && XVECLEN (dst, 0) > 1)
1929 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1930 else if (CONSTANT_P (src)
1931 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1932 tmps[i] = src;
1933 else
1934 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1935 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1936 mode, mode, ssize);
1938 if (shift)
1939 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1940 tmps[i], 0, OPTAB_WIDEN);
1943 emit_queue ();
1945 /* Copy the extracted pieces into the proper (probable) hard regs. */
1946 for (i = start; i < XVECLEN (dst, 0); i++)
1947 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1950 /* Emit code to move a block SRC to block DST, where SRC and DST are
1951 non-consecutive groups of registers, each represented by a PARALLEL. */
1953 void
1954 emit_group_move (rtx dst, rtx src)
1956 int i;
1958 if (GET_CODE (src) != PARALLEL
1959 || GET_CODE (dst) != PARALLEL
1960 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1961 abort ();
1963 /* Skip first entry if NULL. */
1964 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1965 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1966 XEXP (XVECEXP (src, 0, i), 0));
1969 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1970 where SRC is non-consecutive registers represented by a PARALLEL.
1971 SSIZE represents the total size of block ORIG_DST, or -1 if not
1972 known. */
1974 void
1975 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1977 rtx *tmps, dst;
1978 int start, i;
1980 if (GET_CODE (src) != PARALLEL)
1981 abort ();
1983 /* Check for a NULL entry, used to indicate that the parameter goes
1984 both on the stack and in registers. */
1985 if (XEXP (XVECEXP (src, 0, 0), 0))
1986 start = 0;
1987 else
1988 start = 1;
1990 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1992 /* Copy the (probable) hard regs into pseudos. */
1993 for (i = start; i < XVECLEN (src, 0); i++)
1995 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1996 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1997 emit_move_insn (tmps[i], reg);
1999 emit_queue ();
2001 /* If we won't be storing directly into memory, protect the real destination
2002 from strange tricks we might play. */
2003 dst = orig_dst;
2004 if (GET_CODE (dst) == PARALLEL)
2006 rtx temp;
2008 /* We can get a PARALLEL dst if there is a conditional expression in
2009 a return statement. In that case, the dst and src are the same,
2010 so no action is necessary. */
2011 if (rtx_equal_p (dst, src))
2012 return;
2014 /* It is unclear if we can ever reach here, but we may as well handle
2015 it. Allocate a temporary, and split this into a store/load to/from
2016 the temporary. */
2018 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2019 emit_group_store (temp, src, type, ssize);
2020 emit_group_load (dst, temp, type, ssize);
2021 return;
2023 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2025 dst = gen_reg_rtx (GET_MODE (orig_dst));
2026 /* Make life a bit easier for combine. */
2027 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2030 /* Process the pieces. */
2031 for (i = start; i < XVECLEN (src, 0); i++)
2033 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2034 enum machine_mode mode = GET_MODE (tmps[i]);
2035 unsigned int bytelen = GET_MODE_SIZE (mode);
2036 rtx dest = dst;
2038 /* Handle trailing fragments that run over the size of the struct. */
2039 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2041 /* store_bit_field always takes its value from the lsb.
2042 Move the fragment to the lsb if it's not already there. */
2043 if (
2044 #ifdef BLOCK_REG_PADDING
2045 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2046 == (BYTES_BIG_ENDIAN ? upward : downward)
2047 #else
2048 BYTES_BIG_ENDIAN
2049 #endif
2052 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2053 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2054 tmps[i], 0, OPTAB_WIDEN);
2056 bytelen = ssize - bytepos;
2059 if (GET_CODE (dst) == CONCAT)
2061 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2062 dest = XEXP (dst, 0);
2063 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2065 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2066 dest = XEXP (dst, 1);
2068 else if (bytepos == 0 && XVECLEN (src, 0))
2070 dest = assign_stack_temp (GET_MODE (dest),
2071 GET_MODE_SIZE (GET_MODE (dest)), 0);
2072 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2073 tmps[i]);
2074 dst = dest;
2075 break;
2077 else
2078 abort ();
2081 /* Optimize the access just a bit. */
2082 if (GET_CODE (dest) == MEM
2083 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2084 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2085 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2086 && bytelen == GET_MODE_SIZE (mode))
2087 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2088 else
2089 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2090 mode, tmps[i], ssize);
2093 emit_queue ();
2095 /* Copy from the pseudo into the (probable) hard reg. */
2096 if (orig_dst != dst)
2097 emit_move_insn (orig_dst, dst);
2100 /* Generate code to copy a BLKmode object of TYPE out of a
2101 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2102 is null, a stack temporary is created. TGTBLK is returned.
2104 The purpose of this routine is to handle functions that return
2105 BLKmode structures in registers. Some machines (the PA for example)
2106 want to return all small structures in registers regardless of the
2107 structure's alignment. */
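/* Worked example (hypothetical target, for illustration only): for a
   6-byte structure on a 32-bit big-endian machine that returns the value
   at the least significant end of the register, bytes % UNITS_PER_WORD is
   2, so PADDING_CORRECTION below becomes 32 - 2*8 = 16; extraction then
   starts 16 bits into SRCREG while the store into TGTBLK starts at
   bit 0.  */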
2110 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2112 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2113 rtx src = NULL, dst = NULL;
2114 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2115 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2117 if (tgtblk == 0)
2119 tgtblk = assign_temp (build_qualified_type (type,
2120 (TYPE_QUALS (type)
2121 | TYPE_QUAL_CONST)),
2122 0, 1, 1);
2123 preserve_temp_slots (tgtblk);
2126 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2127 into a new pseudo which is a full word. */
2129 if (GET_MODE (srcreg) != BLKmode
2130 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2131 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2133 /* If the structure doesn't take up a whole number of words, see whether
2134 SRCREG is padded on the left or on the right. If it's on the left,
2135 set PADDING_CORRECTION to the number of bits to skip.
2137 In most ABIs, the structure will be returned at the least significant
2138 end of the register, which translates to right padding on little-endian
2139 targets and left padding on big-endian targets. The opposite
2140 holds if the structure is returned at the most significant
2141 end of the register. */
2142 if (bytes % UNITS_PER_WORD != 0
2143 && (targetm.calls.return_in_msb (type)
2144 ? !BYTES_BIG_ENDIAN
2145 : BYTES_BIG_ENDIAN))
2146 padding_correction
2147 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2149 /* Copy the structure BITSIZE bits at a time.
2151 We could probably emit more efficient code for machines which do not use
2152 strict alignment, but it doesn't seem worth the effort at the current
2153 time. */
2154 for (bitpos = 0, xbitpos = padding_correction;
2155 bitpos < bytes * BITS_PER_UNIT;
2156 bitpos += bitsize, xbitpos += bitsize)
2158 /* We need a new source operand each time xbitpos is on a
2159 word boundary and when xbitpos == padding_correction
2160 (the first time through). */
2161 if (xbitpos % BITS_PER_WORD == 0
2162 || xbitpos == padding_correction)
2163 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2164 GET_MODE (srcreg));
2166 /* We need a new destination operand each time bitpos is on
2167 a word boundary. */
2168 if (bitpos % BITS_PER_WORD == 0)
2169 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2171 /* Use xbitpos for the source extraction (right justified) and
2172 bitpos for the destination store (left justified). */
2173 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2174 extract_bit_field (src, bitsize,
2175 xbitpos % BITS_PER_WORD, 1,
2176 NULL_RTX, word_mode, word_mode,
2177 BITS_PER_WORD),
2178 BITS_PER_WORD);
2181 return tgtblk;
2184 /* Add a USE expression for REG to the (possibly empty) list pointed
2185 to by CALL_FUSAGE. REG must denote a hard register. */
2187 void
2188 use_reg (rtx *call_fusage, rtx reg)
2190 if (GET_CODE (reg) != REG
2191 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2192 abort ();
2194 *call_fusage
2195 = gen_rtx_EXPR_LIST (VOIDmode,
2196 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2199 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2200 starting at REGNO. All of these registers must be hard registers. */
2202 void
2203 use_regs (rtx *call_fusage, int regno, int nregs)
2205 int i;
2207 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2208 abort ();
2210 for (i = 0; i < nregs; i++)
2211 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2214 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2215 PARALLEL REGS. This is for calls that pass values in multiple
2216 non-contiguous locations. The Irix 6 ABI has examples of this. */
2218 void
2219 use_group_regs (rtx *call_fusage, rtx regs)
2221 int i;
2223 for (i = 0; i < XVECLEN (regs, 0); i++)
2225 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2227 /* A NULL entry means the parameter goes both on the stack and in
2228 registers. This can also be a MEM for targets that pass values
2229 partially on the stack and partially in registers. */
2230 if (reg != 0 && GET_CODE (reg) == REG)
2231 use_reg (call_fusage, reg);
2236 /* Determine whether the LEN bytes generated by CONSTFUN can be
2237 stored to memory using several move instructions. CONSTFUNDATA is
2238 a pointer which will be passed as argument in every CONSTFUN call.
2239 ALIGN is maximum alignment we can assume. Return nonzero if a
2240 call to store_by_pieces should succeed. */
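/* A rough sketch of a CONSTFUN, for illustration; the string builtins in
   builtins.c use callbacks of essentially this shape, where DATA is taken
   to point at the bytes being stored:

     static rtx
     example_read_str (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
     {
       return c_readstr ((const char *) data + offset, mode);
     }

   example_read_str is an invented name; any function with this signature
   that returns a valid constant of MODE for each queried offset will do.  */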
2243 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2244 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2245 void *constfundata, unsigned int align)
2247 unsigned HOST_WIDE_INT max_size, l;
2248 HOST_WIDE_INT offset = 0;
2249 enum machine_mode mode, tmode;
2250 enum insn_code icode;
2251 int reverse;
2252 rtx cst;
2254 if (len == 0)
2255 return 1;
2257 if (! STORE_BY_PIECES_P (len, align))
2258 return 0;
2260 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2261 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2262 align = MOVE_MAX * BITS_PER_UNIT;
2264 /* We would first store what we can in the largest integer mode, then go to
2265 successively smaller modes. */
2267 for (reverse = 0;
2268 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2269 reverse++)
2271 l = len;
2272 mode = VOIDmode;
2273 max_size = STORE_MAX_PIECES + 1;
2274 while (max_size > 1)
2276 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2277 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2278 if (GET_MODE_SIZE (tmode) < max_size)
2279 mode = tmode;
2281 if (mode == VOIDmode)
2282 break;
2284 icode = mov_optab->handlers[(int) mode].insn_code;
2285 if (icode != CODE_FOR_nothing
2286 && align >= GET_MODE_ALIGNMENT (mode))
2288 unsigned int size = GET_MODE_SIZE (mode);
2290 while (l >= size)
2292 if (reverse)
2293 offset -= size;
2295 cst = (*constfun) (constfundata, offset, mode);
2296 if (!LEGITIMATE_CONSTANT_P (cst))
2297 return 0;
2299 if (!reverse)
2300 offset += size;
2302 l -= size;
2306 max_size = GET_MODE_SIZE (mode);
2309 /* The code above should have handled everything. */
2310 if (l != 0)
2311 abort ();
2314 return 1;
2317 /* Generate several move instructions to store LEN bytes generated by
2318 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2319 pointer which will be passed as argument in every CONSTFUN call.
2320 ALIGN is maximum alignment we can assume.
2321 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2322 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2323 stpcpy. */
2326 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2327 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2328 void *constfundata, unsigned int align, int endp)
2330 struct store_by_pieces data;
2332 if (len == 0)
2334 if (endp == 2)
2335 abort ();
2336 return to;
2339 if (! STORE_BY_PIECES_P (len, align))
2340 abort ();
2341 to = protect_from_queue (to, 1);
2342 data.constfun = constfun;
2343 data.constfundata = constfundata;
2344 data.len = len;
2345 data.to = to;
2346 store_by_pieces_1 (&data, align);
2347 if (endp)
2349 rtx to1;
2351 if (data.reverse)
2352 abort ();
2353 if (data.autinc_to)
2355 if (endp == 2)
2357 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2358 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2359 else
2360 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2361 -1));
2363 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2364 data.offset);
2366 else
2368 if (endp == 2)
2369 --data.offset;
2370 to1 = adjust_address (data.to, QImode, data.offset);
2372 return to1;
2374 else
2375 return data.to;
2378 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2379 rtx with BLKmode). The caller must pass TO through protect_from_queue
2380 before calling. ALIGN is maximum alignment we can assume. */
2382 static void
2383 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2385 struct store_by_pieces data;
2387 if (len == 0)
2388 return;
2390 data.constfun = clear_by_pieces_1;
2391 data.constfundata = NULL;
2392 data.len = len;
2393 data.to = to;
2394 store_by_pieces_1 (&data, align);
2397 /* Callback routine for clear_by_pieces.
2398 Return const0_rtx unconditionally. */
2400 static rtx
2401 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2402 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2403 enum machine_mode mode ATTRIBUTE_UNUSED)
2405 return const0_rtx;
2408 /* Subroutine of clear_by_pieces and store_by_pieces.
2409 Generate several move instructions to store LEN bytes of block TO. (A MEM
2410 rtx with BLKmode). The caller must pass TO through protect_from_queue
2411 before calling. ALIGN is maximum alignment we can assume. */
2413 static void
2414 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2415 unsigned int align ATTRIBUTE_UNUSED)
2417 rtx to_addr = XEXP (data->to, 0);
2418 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2419 enum machine_mode mode = VOIDmode, tmode;
2420 enum insn_code icode;
2422 data->offset = 0;
2423 data->to_addr = to_addr;
2424 data->autinc_to
2425 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2426 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2428 data->explicit_inc_to = 0;
2429 data->reverse
2430 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2431 if (data->reverse)
2432 data->offset = data->len;
2434 /* If storing requires more than two move insns,
2435 copy addresses to registers (to make displacements shorter)
2436 and use post-increment if available. */
2437 if (!data->autinc_to
2438 && move_by_pieces_ninsns (data->len, align) > 2)
2440 /* Determine the main mode we'll be using. */
2441 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2442 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2443 if (GET_MODE_SIZE (tmode) < max_size)
2444 mode = tmode;
2446 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2448 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2449 data->autinc_to = 1;
2450 data->explicit_inc_to = -1;
2453 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2454 && ! data->autinc_to)
2456 data->to_addr = copy_addr_to_reg (to_addr);
2457 data->autinc_to = 1;
2458 data->explicit_inc_to = 1;
2461 if ( !data->autinc_to && CONSTANT_P (to_addr))
2462 data->to_addr = copy_addr_to_reg (to_addr);
2465 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2466 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2467 align = MOVE_MAX * BITS_PER_UNIT;
2469 /* First store what we can in the largest integer mode, then go to
2470 successively smaller modes. */
2472 while (max_size > 1)
2474 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2475 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2476 if (GET_MODE_SIZE (tmode) < max_size)
2477 mode = tmode;
2479 if (mode == VOIDmode)
2480 break;
2482 icode = mov_optab->handlers[(int) mode].insn_code;
2483 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2484 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2486 max_size = GET_MODE_SIZE (mode);
2489 /* The code above should have handled everything. */
2490 if (data->len != 0)
2491 abort ();
2494 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2495 with move instructions for mode MODE. GENFUN is the gen_... function
2496 to make a move insn for that mode. DATA has all the other info. */
2498 static void
2499 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2500 struct store_by_pieces *data)
2502 unsigned int size = GET_MODE_SIZE (mode);
2503 rtx to1, cst;
2505 while (data->len >= size)
2507 if (data->reverse)
2508 data->offset -= size;
2510 if (data->autinc_to)
2511 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2512 data->offset);
2513 else
2514 to1 = adjust_address (data->to, mode, data->offset);
2516 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2517 emit_insn (gen_add2_insn (data->to_addr,
2518 GEN_INT (-(HOST_WIDE_INT) size)));
2520 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2521 emit_insn ((*genfun) (to1, cst));
2523 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2524 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2526 if (! data->reverse)
2527 data->offset += size;
2529 data->len -= size;
2533 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2534 its length in bytes. */
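/* Hedged example of the dispatch below (numbers invented): clearing a
   16-byte BLKmode MEM with known double-word alignment on a 64-bit target
   would typically hit clear_by_pieces and emit two DImode stores of zero,
   while a large or variable SIZE falls back to the clrstr pattern or a
   memset/bzero libcall.  */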
2537 clear_storage (rtx object, rtx size)
2539 rtx retval = 0;
2540 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2541 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2543 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2544 just move a zero. Otherwise, do this a piece at a time. */
2545 if (GET_MODE (object) != BLKmode
2546 && GET_CODE (size) == CONST_INT
2547 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2548 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2549 else
2551 object = protect_from_queue (object, 1);
2552 size = protect_from_queue (size, 0);
2554 if (size == const0_rtx)
2556 else if (GET_CODE (size) == CONST_INT
2557 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2558 clear_by_pieces (object, INTVAL (size), align);
2559 else if (clear_storage_via_clrstr (object, size, align))
2561 else
2562 retval = clear_storage_via_libcall (object, size);
2565 return retval;
2568 /* A subroutine of clear_storage. Expand a clrstr pattern;
2569 return true if successful. */
2571 static bool
2572 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2574 /* Try the most limited insn first, because there's no point
2575 including more than one in the machine description unless
2576 the more limited one has some advantage. */
2578 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2579 enum machine_mode mode;
2581 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2582 mode = GET_MODE_WIDER_MODE (mode))
2584 enum insn_code code = clrstr_optab[(int) mode];
2585 insn_operand_predicate_fn pred;
2587 if (code != CODE_FOR_nothing
2588 /* We don't need MODE to be narrower than
2589 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2590 the mode mask, as it is returned by the macro, it will
2591 definitely be less than the actual mode mask. */
2592 && ((GET_CODE (size) == CONST_INT
2593 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2594 <= (GET_MODE_MASK (mode) >> 1)))
2595 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2596 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2597 || (*pred) (object, BLKmode))
2598 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2599 || (*pred) (opalign, VOIDmode)))
2601 rtx op1;
2602 rtx last = get_last_insn ();
2603 rtx pat;
2605 op1 = convert_to_mode (mode, size, 1);
2606 pred = insn_data[(int) code].operand[1].predicate;
2607 if (pred != 0 && ! (*pred) (op1, mode))
2608 op1 = copy_to_mode_reg (mode, op1);
2610 pat = GEN_FCN ((int) code) (object, op1, opalign);
2611 if (pat)
2613 emit_insn (pat);
2614 return true;
2616 else
2617 delete_insns_since (last);
2621 return false;
2624 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2625 Return the return value of memset, 0 otherwise. */
2627 static rtx
2628 clear_storage_via_libcall (rtx object, rtx size)
2630 tree call_expr, arg_list, fn, object_tree, size_tree;
2631 enum machine_mode size_mode;
2632 rtx retval;
2634 /* OBJECT or SIZE may have been passed through protect_from_queue.
2636 It is unsafe to save the value generated by protect_from_queue
2637 and reuse it later. Consider what happens if emit_queue is
2638 called before the return value from protect_from_queue is used.
2640 Expansion of the CALL_EXPR below will call emit_queue before
2641 we are finished emitting RTL for argument setup. So if we are
2642 not careful we could get the wrong value for an argument.
2644 To avoid this problem we go ahead and emit code to copy OBJECT
2645 and SIZE into new pseudos. We can then place those new pseudos
2646 into an RTL_EXPR and use them later, even after a call to
2647 emit_queue.
2649 Note this is not strictly needed for library calls since they
2650 do not call emit_queue before loading their arguments. However,
2651 we may need to have library calls call emit_queue in the future
2652 since failing to do so could cause problems for targets which
2653 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2655 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2657 if (TARGET_MEM_FUNCTIONS)
2658 size_mode = TYPE_MODE (sizetype);
2659 else
2660 size_mode = TYPE_MODE (unsigned_type_node);
2661 size = convert_to_mode (size_mode, size, 1);
2662 size = copy_to_mode_reg (size_mode, size);
2664 /* It is incorrect to use the libcall calling conventions to call
2665 memset in this context. This could be a user call to memset and
2666 the user may wish to examine the return value from memset. For
2667 targets where libcalls and normal calls have different conventions
2668 for returning pointers, we could end up generating incorrect code.
2670 For convenience, we generate the call to bzero this way as well. */
2672 object_tree = make_tree (ptr_type_node, object);
2673 if (TARGET_MEM_FUNCTIONS)
2674 size_tree = make_tree (sizetype, size);
2675 else
2676 size_tree = make_tree (unsigned_type_node, size);
2678 fn = clear_storage_libcall_fn (true);
2679 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2680 if (TARGET_MEM_FUNCTIONS)
2681 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2682 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2684 /* Now we have to build up the CALL_EXPR itself. */
2685 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2686 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2687 call_expr, arg_list, NULL_TREE);
2689 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2691 /* If we are initializing a readonly value, show the above call
2692 clobbered it. Otherwise, a load from it may erroneously be
2693 hoisted from a loop. */
2694 if (RTX_UNCHANGING_P (object))
2695 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2697 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2700 /* A subroutine of clear_storage_via_libcall. Create the tree node
2701 for the function we use for block clears. The first time FOR_CALL
2702 is true, we call assemble_external. */
2704 static GTY(()) tree block_clear_fn;
2706 void
2707 init_block_clear_fn (const char *asmspec)
2709 if (!block_clear_fn)
2711 tree fn, args;
2713 if (TARGET_MEM_FUNCTIONS)
2715 fn = get_identifier ("memset");
2716 args = build_function_type_list (ptr_type_node, ptr_type_node,
2717 integer_type_node, sizetype,
2718 NULL_TREE);
2720 else
2722 fn = get_identifier ("bzero");
2723 args = build_function_type_list (void_type_node, ptr_type_node,
2724 unsigned_type_node, NULL_TREE);
2727 fn = build_decl (FUNCTION_DECL, fn, args);
2728 DECL_EXTERNAL (fn) = 1;
2729 TREE_PUBLIC (fn) = 1;
2730 DECL_ARTIFICIAL (fn) = 1;
2731 TREE_NOTHROW (fn) = 1;
2733 block_clear_fn = fn;
2736 if (asmspec)
2738 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2739 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2743 static tree
2744 clear_storage_libcall_fn (int for_call)
2746 static bool emitted_extern;
2748 if (!block_clear_fn)
2749 init_block_clear_fn (NULL);
2751 if (for_call && !emitted_extern)
2753 emitted_extern = true;
2754 make_decl_rtl (block_clear_fn, NULL);
2755 assemble_external (block_clear_fn);
2758 return block_clear_fn;
2761 /* Generate code to copy Y into X.
2762 Both Y and X must have the same mode, except that
2763 Y can be a constant with VOIDmode.
2764 This mode cannot be BLKmode; use emit_block_move for that.
2766 Return the last instruction emitted. */
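/* Minimal usage sketch (illustrative only): something like

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   is the common case; the CONST_INT source has VOIDmode, which is why
   VOIDmode constants are accepted below.  */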
2769 emit_move_insn (rtx x, rtx y)
2771 enum machine_mode mode = GET_MODE (x);
2772 rtx y_cst = NULL_RTX;
2773 rtx last_insn, set;
2775 x = protect_from_queue (x, 1);
2776 y = protect_from_queue (y, 0);
2778 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2779 abort ();
2781 /* Never force constant_p_rtx to memory. */
2782 if (GET_CODE (y) == CONSTANT_P_RTX)
2784 else if (CONSTANT_P (y))
2786 if (optimize
2787 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2788 && (last_insn = compress_float_constant (x, y)))
2789 return last_insn;
2791 y_cst = y;
2793 if (!LEGITIMATE_CONSTANT_P (y))
2795 y = force_const_mem (mode, y);
2797 /* If the target's cannot_force_const_mem prevented the spill,
2798 assume that the target's move expanders will also take care
2799 of the non-legitimate constant. */
2800 if (!y)
2801 y = y_cst;
2805 /* If X or Y are memory references, verify that their addresses are valid
2806 for the machine. */
2807 if (GET_CODE (x) == MEM
2808 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2809 && ! push_operand (x, GET_MODE (x)))
2810 || (flag_force_addr
2811 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2812 x = validize_mem (x);
2814 if (GET_CODE (y) == MEM
2815 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2816 || (flag_force_addr
2817 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2818 y = validize_mem (y);
2820 if (mode == BLKmode)
2821 abort ();
2823 last_insn = emit_move_insn_1 (x, y);
2825 if (y_cst && GET_CODE (x) == REG
2826 && (set = single_set (last_insn)) != NULL_RTX
2827 && SET_DEST (set) == x
2828 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2829 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2831 return last_insn;
2834 /* Low level part of emit_move_insn.
2835 Called just like emit_move_insn, but assumes X and Y
2836 are basically valid. */
2839 emit_move_insn_1 (rtx x, rtx y)
2841 enum machine_mode mode = GET_MODE (x);
2842 enum machine_mode submode;
2843 enum mode_class class = GET_MODE_CLASS (mode);
2845 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2846 abort ();
2848 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2849 return
2850 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2852 /* Expand complex moves by moving real part and imag part, if possible. */
2853 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2854 && BLKmode != (submode = GET_MODE_INNER (mode))
2855 && (mov_optab->handlers[(int) submode].insn_code
2856 != CODE_FOR_nothing))
2858 /* Don't split destination if it is a stack push. */
2859 int stack = push_operand (x, GET_MODE (x));
2861 #ifdef PUSH_ROUNDING
2862 /* In case we output to the stack, but the size is smaller than the
2863 machine can push exactly, we need to use move instructions. */
2864 if (stack
2865 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2866 != GET_MODE_SIZE (submode)))
2868 rtx temp;
2869 HOST_WIDE_INT offset1, offset2;
2871 /* Do not use anti_adjust_stack, since we don't want to update
2872 stack_pointer_delta. */
2873 temp = expand_binop (Pmode,
2874 #ifdef STACK_GROWS_DOWNWARD
2875 sub_optab,
2876 #else
2877 add_optab,
2878 #endif
2879 stack_pointer_rtx,
2880 GEN_INT
2881 (PUSH_ROUNDING
2882 (GET_MODE_SIZE (GET_MODE (x)))),
2883 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2885 if (temp != stack_pointer_rtx)
2886 emit_move_insn (stack_pointer_rtx, temp);
2888 #ifdef STACK_GROWS_DOWNWARD
2889 offset1 = 0;
2890 offset2 = GET_MODE_SIZE (submode);
2891 #else
2892 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2893 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2894 + GET_MODE_SIZE (submode));
2895 #endif
2897 emit_move_insn (change_address (x, submode,
2898 gen_rtx_PLUS (Pmode,
2899 stack_pointer_rtx,
2900 GEN_INT (offset1))),
2901 gen_realpart (submode, y));
2902 emit_move_insn (change_address (x, submode,
2903 gen_rtx_PLUS (Pmode,
2904 stack_pointer_rtx,
2905 GEN_INT (offset2))),
2906 gen_imagpart (submode, y));
2908 else
2909 #endif
2910 /* If this is a stack, push the highpart first, so it
2911 will be in the argument order.
2913 In that case, change_address is used only to convert
2914 the mode, not to change the address. */
2915 if (stack)
2917 /* Note that the real part always precedes the imag part in memory
2918 regardless of machine's endianness. */
2919 #ifdef STACK_GROWS_DOWNWARD
2920 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2921 gen_imagpart (submode, y));
2922 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2923 gen_realpart (submode, y));
2924 #else
2925 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2926 gen_realpart (submode, y));
2927 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2928 gen_imagpart (submode, y));
2929 #endif
2931 else
2933 rtx realpart_x, realpart_y;
2934 rtx imagpart_x, imagpart_y;
2936 /* If this is a complex value with each part being smaller than a
2937 word, the usual calling sequence will likely pack the pieces into
2938 a single register. Unfortunately, SUBREG of hard registers only
2939 deals in terms of words, so we have a problem converting input
2940 arguments to the CONCAT of two registers that is used elsewhere
2941 for complex values. If this is before reload, we can copy it into
2942 memory and reload. FIXME, we should see about using extract and
2943 insert on integer registers, but complex short and complex char
2944 variables should be rarely used. */
2945 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2946 && (reload_in_progress | reload_completed) == 0)
2948 int packed_dest_p
2949 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2950 int packed_src_p
2951 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2953 if (packed_dest_p || packed_src_p)
2955 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2956 ? MODE_FLOAT : MODE_INT);
2958 enum machine_mode reg_mode
2959 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2961 if (reg_mode != BLKmode)
2963 rtx mem = assign_stack_temp (reg_mode,
2964 GET_MODE_SIZE (mode), 0);
2965 rtx cmem = adjust_address (mem, mode, 0);
2967 cfun->cannot_inline
2968 = N_("function using short complex types cannot be inline");
2970 if (packed_dest_p)
2972 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2974 emit_move_insn_1 (cmem, y);
2975 return emit_move_insn_1 (sreg, mem);
2977 else
2979 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2981 emit_move_insn_1 (mem, sreg);
2982 return emit_move_insn_1 (x, cmem);
2988 realpart_x = gen_realpart (submode, x);
2989 realpart_y = gen_realpart (submode, y);
2990 imagpart_x = gen_imagpart (submode, x);
2991 imagpart_y = gen_imagpart (submode, y);
2993 /* Show the output dies here. This is necessary for SUBREGs
2994 of pseudos since we cannot track their lifetimes correctly;
2995 hard regs shouldn't appear here except as return values.
2996 We never want to emit such a clobber after reload. */
2997 if (x != y
2998 && ! (reload_in_progress || reload_completed)
2999 && (GET_CODE (realpart_x) == SUBREG
3000 || GET_CODE (imagpart_x) == SUBREG))
3001 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3003 emit_move_insn (realpart_x, realpart_y);
3004 emit_move_insn (imagpart_x, imagpart_y);
3007 return get_last_insn ();
3010 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3011 find a mode to do it in. If we have a movcc, use it. Otherwise,
3012 find the MODE_INT mode of the same width. */
3013 else if (GET_MODE_CLASS (mode) == MODE_CC
3014 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3016 enum insn_code insn_code;
3017 enum machine_mode tmode = VOIDmode;
3018 rtx x1 = x, y1 = y;
3020 if (mode != CCmode
3021 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3022 tmode = CCmode;
3023 else
3024 for (tmode = QImode; tmode != VOIDmode;
3025 tmode = GET_MODE_WIDER_MODE (tmode))
3026 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3027 break;
3029 if (tmode == VOIDmode)
3030 abort ();
3032 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3033 may call change_address which is not appropriate if we were
3034 called when a reload was in progress. We don't have to worry
3035 about changing the address since the size in bytes is supposed to
3036 be the same. Copy the MEM to change the mode and move any
3037 substitutions from the old MEM to the new one. */
3039 if (reload_in_progress)
3041 x = gen_lowpart_common (tmode, x1);
3042 if (x == 0 && GET_CODE (x1) == MEM)
3044 x = adjust_address_nv (x1, tmode, 0);
3045 copy_replacements (x1, x);
3048 y = gen_lowpart_common (tmode, y1);
3049 if (y == 0 && GET_CODE (y1) == MEM)
3051 y = adjust_address_nv (y1, tmode, 0);
3052 copy_replacements (y1, y);
3055 else
3057 x = gen_lowpart (tmode, x);
3058 y = gen_lowpart (tmode, y);
3061 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3062 return emit_insn (GEN_FCN (insn_code) (x, y));
3065 /* Try using a move pattern for the corresponding integer mode. This is
3066 only safe when simplify_subreg can convert MODE constants into integer
3067 constants. At present, it can only do this reliably if the value
3068 fits within a HOST_WIDE_INT. */
3069 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3070 && (submode = int_mode_for_mode (mode)) != BLKmode
3071 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3072 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3073 (simplify_gen_subreg (submode, x, mode, 0),
3074 simplify_gen_subreg (submode, y, mode, 0)));
3076 /* This will handle any multi-word or full-word mode that lacks a move_insn
3077 pattern. However, you will get better code if you define such patterns,
3078 even if they must turn into multiple assembler instructions. */
3079 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3081 rtx last_insn = 0;
3082 rtx seq, inner;
3083 int need_clobber;
3084 int i;
3086 #ifdef PUSH_ROUNDING
3088 /* If X is a push on the stack, do the push now and replace
3089 X with a reference to the stack pointer. */
3090 if (push_operand (x, GET_MODE (x)))
3092 rtx temp;
3093 enum rtx_code code;
3095 /* Do not use anti_adjust_stack, since we don't want to update
3096 stack_pointer_delta. */
3097 temp = expand_binop (Pmode,
3098 #ifdef STACK_GROWS_DOWNWARD
3099 sub_optab,
3100 #else
3101 add_optab,
3102 #endif
3103 stack_pointer_rtx,
3104 GEN_INT
3105 (PUSH_ROUNDING
3106 (GET_MODE_SIZE (GET_MODE (x)))),
3107 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3109 if (temp != stack_pointer_rtx)
3110 emit_move_insn (stack_pointer_rtx, temp);
3112 code = GET_CODE (XEXP (x, 0));
3114 /* Just hope that small offsets off SP are OK. */
3115 if (code == POST_INC)
3116 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3117 GEN_INT (-((HOST_WIDE_INT)
3118 GET_MODE_SIZE (GET_MODE (x)))));
3119 else if (code == POST_DEC)
3120 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3121 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3122 else
3123 temp = stack_pointer_rtx;
3125 x = change_address (x, VOIDmode, temp);
3127 #endif
3129 /* If we are in reload, see if either operand is a MEM whose address
3130 is scheduled for replacement. */
3131 if (reload_in_progress && GET_CODE (x) == MEM
3132 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3133 x = replace_equiv_address_nv (x, inner);
3134 if (reload_in_progress && GET_CODE (y) == MEM
3135 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3136 y = replace_equiv_address_nv (y, inner);
3138 start_sequence ();
3140 need_clobber = 0;
3141 for (i = 0;
3142 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3143 i++)
3145 rtx xpart = operand_subword (x, i, 1, mode);
3146 rtx ypart = operand_subword (y, i, 1, mode);
3148 /* If we can't get a part of Y, put Y into memory if it is a
3149 constant. Otherwise, force it into a register. If we still
3150 can't get a part of Y, abort. */
3151 if (ypart == 0 && CONSTANT_P (y))
3153 y = force_const_mem (mode, y);
3154 ypart = operand_subword (y, i, 1, mode);
3156 else if (ypart == 0)
3157 ypart = operand_subword_force (y, i, mode);
3159 if (xpart == 0 || ypart == 0)
3160 abort ();
3162 need_clobber |= (GET_CODE (xpart) == SUBREG);
3164 last_insn = emit_move_insn (xpart, ypart);
3167 seq = get_insns ();
3168 end_sequence ();
3170 /* Show the output dies here. This is necessary for SUBREGs
3171 of pseudos since we cannot track their lifetimes correctly;
3172 hard regs shouldn't appear here except as return values.
3173 We never want to emit such a clobber after reload. */
3174 if (x != y
3175 && ! (reload_in_progress || reload_completed)
3176 && need_clobber != 0)
3177 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3179 emit_insn (seq);
3181 return last_insn;
3183 else
3184 abort ();
3187 /* If Y is representable exactly in a narrower mode, and the target can
3188 perform the extension directly from constant or memory, then emit the
3189 move as an extension. */
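/* Illustrative example (target-dependent, not guaranteed): moving the
   DFmode constant 0.5 into a register on a machine with a usable
   extendsfdf2 pattern can be emitted as an SFmode constant load extended
   to DFmode, because 0.5 truncates to SFmode exactly; a value such as
   0.1, which is not exact in SFmode, would be rejected by the
   exact_real_truncate check below.  */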
3191 static rtx
3192 compress_float_constant (rtx x, rtx y)
3194 enum machine_mode dstmode = GET_MODE (x);
3195 enum machine_mode orig_srcmode = GET_MODE (y);
3196 enum machine_mode srcmode;
3197 REAL_VALUE_TYPE r;
3199 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3201 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3202 srcmode != orig_srcmode;
3203 srcmode = GET_MODE_WIDER_MODE (srcmode))
3205 enum insn_code ic;
3206 rtx trunc_y, last_insn;
3208 /* Skip if the target can't extend this way. */
3209 ic = can_extend_p (dstmode, srcmode, 0);
3210 if (ic == CODE_FOR_nothing)
3211 continue;
3213 /* Skip if the narrowed value isn't exact. */
3214 if (! exact_real_truncate (srcmode, &r))
3215 continue;
3217 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3219 if (LEGITIMATE_CONSTANT_P (trunc_y))
3221 /* Skip if the target needs extra instructions to perform
3222 the extension. */
3223 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3224 continue;
3226 else if (float_extend_from_mem[dstmode][srcmode])
3227 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3228 else
3229 continue;
3231 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3232 last_insn = get_last_insn ();
3234 if (GET_CODE (x) == REG)
3235 set_unique_reg_note (last_insn, REG_EQUAL, y);
3237 return last_insn;
3240 return NULL_RTX;
3243 /* Pushing data onto the stack. */
3245 /* Push a block of length SIZE (perhaps variable)
3246 and return an rtx to address the beginning of the block.
3247 Note that it is not possible for the value returned to be a QUEUED.
3248 The value may be virtual_outgoing_args_rtx.
3250 EXTRA is the number of bytes of padding to push in addition to SIZE.
3251 BELOW nonzero means this padding comes at low addresses;
3252 otherwise, the padding comes at high addresses. */
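/* Worked example (assuming a downward-growing stack; values invented):
   with SIZE == 12, EXTRA == 4 and BELOW nonzero, 16 bytes are pushed and
   the returned address is virtual_outgoing_args_rtx + 4, leaving the 4
   padding bytes at the lower addresses, below the block itself.  */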
3255 push_block (rtx size, int extra, int below)
3257 rtx temp;
3259 size = convert_modes (Pmode, ptr_mode, size, 1);
3260 if (CONSTANT_P (size))
3261 anti_adjust_stack (plus_constant (size, extra));
3262 else if (GET_CODE (size) == REG && extra == 0)
3263 anti_adjust_stack (size);
3264 else
3266 temp = copy_to_mode_reg (Pmode, size);
3267 if (extra != 0)
3268 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3269 temp, 0, OPTAB_LIB_WIDEN);
3270 anti_adjust_stack (temp);
3273 #ifndef STACK_GROWS_DOWNWARD
3274 if (0)
3275 #else
3276 if (1)
3277 #endif
3279 temp = virtual_outgoing_args_rtx;
3280 if (extra != 0 && below)
3281 temp = plus_constant (temp, extra);
3283 else
3285 if (GET_CODE (size) == CONST_INT)
3286 temp = plus_constant (virtual_outgoing_args_rtx,
3287 -INTVAL (size) - (below ? 0 : extra));
3288 else if (extra != 0 && !below)
3289 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3290 negate_rtx (Pmode, plus_constant (size, extra)));
3291 else
3292 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3293 negate_rtx (Pmode, size));
3296 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3299 #ifdef PUSH_ROUNDING
3301 /* Emit single push insn. */
3303 static void
3304 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3306 rtx dest_addr;
3307 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3308 rtx dest;
3309 enum insn_code icode;
3310 insn_operand_predicate_fn pred;
3312 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3313 /* If there is a push pattern, use it. Otherwise try the old way of
3314 throwing a MEM representing the push operation to the move expander. */
3315 icode = push_optab->handlers[(int) mode].insn_code;
3316 if (icode != CODE_FOR_nothing)
3318 if (((pred = insn_data[(int) icode].operand[0].predicate)
3319 && !((*pred) (x, mode))))
3320 x = force_reg (mode, x);
3321 emit_insn (GEN_FCN (icode) (x));
3322 return;
3324 if (GET_MODE_SIZE (mode) == rounded_size)
3325 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3326 /* If we are to pad downward, adjust the stack pointer first and
3327 then store X into the stack location using an offset. This is
3328 because emit_move_insn does not know how to pad; it does not have
3329 access to type. */
3330 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3332 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3333 HOST_WIDE_INT offset;
3335 emit_move_insn (stack_pointer_rtx,
3336 expand_binop (Pmode,
3337 #ifdef STACK_GROWS_DOWNWARD
3338 sub_optab,
3339 #else
3340 add_optab,
3341 #endif
3342 stack_pointer_rtx,
3343 GEN_INT (rounded_size),
3344 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3346 offset = (HOST_WIDE_INT) padding_size;
3347 #ifdef STACK_GROWS_DOWNWARD
3348 if (STACK_PUSH_CODE == POST_DEC)
3349 /* We have already decremented the stack pointer, so get the
3350 previous value. */
3351 offset += (HOST_WIDE_INT) rounded_size;
3352 #else
3353 if (STACK_PUSH_CODE == POST_INC)
3354 /* We have already incremented the stack pointer, so get the
3355 previous value. */
3356 offset -= (HOST_WIDE_INT) rounded_size;
3357 #endif
3358 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3360 else
3362 #ifdef STACK_GROWS_DOWNWARD
3363 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3364 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3365 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3366 #else
3367 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3368 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3369 GEN_INT (rounded_size));
3370 #endif
3371 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3374 dest = gen_rtx_MEM (mode, dest_addr);
3376 if (type != 0)
3378 set_mem_attributes (dest, type, 1);
3380 if (flag_optimize_sibling_calls)
3381 /* Function incoming arguments may overlap with sibling call
3382 outgoing arguments and we cannot allow reordering of reads
3383 from function arguments with stores to outgoing arguments
3384 of sibling calls. */
3385 set_mem_alias_set (dest, 0);
3387 emit_move_insn (dest, x);
3389 #endif
3391 /* Generate code to push X onto the stack, assuming it has mode MODE and
3392 type TYPE.
3393 MODE is redundant except when X is a CONST_INT (since they don't
3394 carry mode info).
3395 SIZE is an rtx for the size of data to be copied (in bytes),
3396 needed only if X is BLKmode.
3398 ALIGN (in bits) is maximum alignment we can assume.
3400 If PARTIAL and REG are both nonzero, then copy that many of the first
3401 words of X into registers starting with REG, and push the rest of X.
3402 The amount of space pushed is decreased by PARTIAL words,
3403 rounded *down* to a multiple of PARM_BOUNDARY.
3404 REG must be a hard register in this case.
3405 If REG is zero but PARTIAL is not, take all other actions for an
3406 argument partially in registers, but do not actually load any
3407 registers.
3409 EXTRA is the amount in bytes of extra space to leave next to this arg.
3410 This is ignored if an argument block has already been allocated.
3412 On a machine that lacks real push insns, ARGS_ADDR is the address of
3413 the bottom of the argument block for this call. We use indexing off there
3414 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3415 argument block has not been preallocated.
3417 ARGS_SO_FAR is the size of args previously pushed for this call.
3419 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3420 for arguments passed in registers. If nonzero, it will be the number
3421 of bytes required. */
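/* Hedged example of PARTIAL (parameters invented): with UNITS_PER_WORD
   == 4, PARTIAL == 2 and REG a hard register, the first 8 bytes of X end
   up in REG and REG+1 (copied at the very end of this function) and only
   the remainder is pushed or stored to the stack, assuming PARM_BOUNDARY
   does not exceed the word size.  */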
3423 void
3424 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3425 unsigned int align, int partial, rtx reg, int extra,
3426 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3427 rtx alignment_pad)
3429 rtx xinner;
3430 enum direction stack_direction
3431 #ifdef STACK_GROWS_DOWNWARD
3432 = downward;
3433 #else
3434 = upward;
3435 #endif
3437 /* Decide where to pad the argument: `downward' for below,
3438 `upward' for above, or `none' for don't pad it.
3439 Default is below for small data on big-endian machines; else above. */
3440 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3442 /* Invert direction if stack is post-decrement.
3443 FIXME: why? */
3444 if (STACK_PUSH_CODE == POST_DEC)
3445 if (where_pad != none)
3446 where_pad = (where_pad == downward ? upward : downward);
3448 xinner = x = protect_from_queue (x, 0);
3450 if (mode == BLKmode)
3452 /* Copy a block into the stack, entirely or partially. */
3454 rtx temp;
3455 int used = partial * UNITS_PER_WORD;
3456 int offset;
3457 int skip;
3459 if (reg && GET_CODE (reg) == PARALLEL)
3461 /* Use the size of the elt to compute offset. */
3462 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3463 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3464 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3466 else
3467 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3469 if (size == 0)
3470 abort ();
3472 used -= offset;
3474 /* USED is now the # of bytes we need not copy to the stack
3475 because registers will take care of them. */
3477 if (partial != 0)
3478 xinner = adjust_address (xinner, BLKmode, used);
3480 /* If the partial register-part of the arg counts in its stack size,
3481 skip the part of stack space corresponding to the registers.
3482 Otherwise, start copying to the beginning of the stack space,
3483 by setting SKIP to 0. */
3484 skip = (reg_parm_stack_space == 0) ? 0 : used;
3486 #ifdef PUSH_ROUNDING
3487 /* Do it with several push insns if that doesn't take lots of insns
3488 and if there is no difficulty with push insns that skip bytes
3489 on the stack for alignment purposes. */
3490 if (args_addr == 0
3491 && PUSH_ARGS
3492 && GET_CODE (size) == CONST_INT
3493 && skip == 0
3494 && MEM_ALIGN (xinner) >= align
3495 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3496 /* Here we avoid the case of a structure whose weak alignment
3497 forces many pushes of a small amount of data,
3498 and such small pushes do rounding that causes trouble. */
3499 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3500 || align >= BIGGEST_ALIGNMENT
3501 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3502 == (align / BITS_PER_UNIT)))
3503 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3505 /* Push padding now if padding above and stack grows down,
3506 or if padding below and stack grows up.
3507 But if space already allocated, this has already been done. */
3508 if (extra && args_addr == 0
3509 && where_pad != none && where_pad != stack_direction)
3510 anti_adjust_stack (GEN_INT (extra));
3512 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3514 else
3515 #endif /* PUSH_ROUNDING */
3517 rtx target;
3519 /* Otherwise make space on the stack and copy the data
3520 to the address of that space. */
3522 /* Deduct words put into registers from the size we must copy. */
3523 if (partial != 0)
3525 if (GET_CODE (size) == CONST_INT)
3526 size = GEN_INT (INTVAL (size) - used);
3527 else
3528 size = expand_binop (GET_MODE (size), sub_optab, size,
3529 GEN_INT (used), NULL_RTX, 0,
3530 OPTAB_LIB_WIDEN);
3533 /* Get the address of the stack space.
3534 In this case, we do not deal with EXTRA separately.
3535 A single stack adjust will do. */
3536 if (! args_addr)
3538 temp = push_block (size, extra, where_pad == downward);
3539 extra = 0;
3541 else if (GET_CODE (args_so_far) == CONST_INT)
3542 temp = memory_address (BLKmode,
3543 plus_constant (args_addr,
3544 skip + INTVAL (args_so_far)));
3545 else
3546 temp = memory_address (BLKmode,
3547 plus_constant (gen_rtx_PLUS (Pmode,
3548 args_addr,
3549 args_so_far),
3550 skip));
3552 if (!ACCUMULATE_OUTGOING_ARGS)
3554 /* If the source is referenced relative to the stack pointer,
3555 copy it to another register to stabilize it. We do not need
3556 to do this if we know that we won't be changing sp. */
3558 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3559 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3560 temp = copy_to_reg (temp);
3563 target = gen_rtx_MEM (BLKmode, temp);
3565 if (type != 0)
3567 set_mem_attributes (target, type, 1);
3568 /* Function incoming arguments may overlap with sibling call
3569 outgoing arguments and we cannot allow reordering of reads
3570 from function arguments with stores to outgoing arguments
3571 of sibling calls. */
3572 set_mem_alias_set (target, 0);
3575 /* ALIGN may well be better aligned than TYPE, e.g. due to
3576 PARM_BOUNDARY. Assume the caller isn't lying. */
3577 set_mem_align (target, align);
3579 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3582 else if (partial > 0)
3584 /* Scalar partly in registers. */
3586 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3587 int i;
3588 int not_stack;
3589 /* # words of start of argument
3590 that we must make space for but need not store. */
3591 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3592 int args_offset = INTVAL (args_so_far);
3593 int skip;
3595 /* Push padding now if padding above and stack grows down,
3596 or if padding below and stack grows up.
3597 But if space already allocated, this has already been done. */
3598 if (extra && args_addr == 0
3599 && where_pad != none && where_pad != stack_direction)
3600 anti_adjust_stack (GEN_INT (extra));
3602 /* If we make space by pushing it, we might as well push
3603 the real data. Otherwise, we can leave OFFSET nonzero
3604 and leave the space uninitialized. */
3605 if (args_addr == 0)
3606 offset = 0;
3608 /* Now NOT_STACK gets the number of words that we don't need to
3609 allocate on the stack. */
3610 not_stack = partial - offset;
3612 /* If the partial register-part of the arg counts in its stack size,
3613 skip the part of stack space corresponding to the registers.
3614 Otherwise, start copying to the beginning of the stack space,
3615 by setting SKIP to 0. */
3616 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3618 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3619 x = validize_mem (force_const_mem (mode, x));
3621 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3622 SUBREGs of such registers are not allowed. */
3623 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3624 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3625 x = copy_to_reg (x);
3627 /* Loop over all the words allocated on the stack for this arg. */
3628 /* We can do it by words, because any scalar bigger than a word
3629 has a size that is a multiple of a word. */
3630 #ifndef PUSH_ARGS_REVERSED
3631 for (i = not_stack; i < size; i++)
3632 #else
3633 for (i = size - 1; i >= not_stack; i--)
3634 #endif
3635 if (i >= not_stack + offset)
3636 emit_push_insn (operand_subword_force (x, i, mode),
3637 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3638 0, args_addr,
3639 GEN_INT (args_offset + ((i - not_stack + skip)
3640 * UNITS_PER_WORD)),
3641 reg_parm_stack_space, alignment_pad);
3643 else
3645 rtx addr;
3646 rtx dest;
3648 /* Push padding now if padding above and stack grows down,
3649 or if padding below and stack grows up.
3650 But if space already allocated, this has already been done. */
3651 if (extra && args_addr == 0
3652 && where_pad != none && where_pad != stack_direction)
3653 anti_adjust_stack (GEN_INT (extra));
3655 #ifdef PUSH_ROUNDING
3656 if (args_addr == 0 && PUSH_ARGS)
3657 emit_single_push_insn (mode, x, type);
3658 else
3659 #endif
3661 if (GET_CODE (args_so_far) == CONST_INT)
3662 addr
3663 = memory_address (mode,
3664 plus_constant (args_addr,
3665 INTVAL (args_so_far)));
3666 else
3667 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3668 args_so_far));
3669 dest = gen_rtx_MEM (mode, addr);
3670 if (type != 0)
3672 set_mem_attributes (dest, type, 1);
3673 /* Function incoming arguments may overlap with sibling call
3674 outgoing arguments and we cannot allow reordering of reads
3675 from function arguments with stores to outgoing arguments
3676 of sibling calls. */
3677 set_mem_alias_set (dest, 0);
3680 emit_move_insn (dest, x);
3684 /* If part should go in registers, copy that part
3685 into the appropriate registers. Do this now, at the end,
3686 since mem-to-mem copies above may do function calls. */
3687 if (partial > 0 && reg != 0)
3689 /* Handle calls that pass values in multiple non-contiguous locations.
3690 The Irix 6 ABI has examples of this. */
3691 if (GET_CODE (reg) == PARALLEL)
3692 emit_group_load (reg, x, type, -1);
3693 else
3694 move_block_to_reg (REGNO (reg), x, partial, mode);
3697 if (extra && args_addr == 0 && where_pad == stack_direction)
3698 anti_adjust_stack (GEN_INT (extra));
3700 if (alignment_pad && args_addr == 0)
3701 anti_adjust_stack (alignment_pad);
3704 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3705 operations. */
3707 static rtx
3708 get_subtarget (rtx x)
3710 return ((x == 0
3711 /* Only registers can be subtargets. */
3712 || GET_CODE (x) != REG
3713 /* If the register is readonly, it can't be set more than once. */
3714 || RTX_UNCHANGING_P (x)
3715 /* Don't use hard regs to avoid extending their life. */
3716 || REGNO (x) < FIRST_PSEUDO_REGISTER
3717 /* Avoid subtargets inside loops,
3718 since they hide some invariant expressions. */
3719 || preserve_subexpressions_p ())
3720 ? 0 : x);
3723 /* Expand an assignment that stores the value of FROM into TO.
3724 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3725 (This may contain a QUEUED rtx;
3726 if the value is constant, this rtx is a constant.)
3727 Otherwise, the returned value is NULL_RTX. */
3729 rtx
3730 expand_assignment (tree to, tree from, int want_value)
3732 rtx to_rtx = 0;
3733 rtx result;
3735 /* Don't crash if the lhs of the assignment was erroneous. */
3737 if (TREE_CODE (to) == ERROR_MARK)
3739 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3740 return want_value ? result : NULL_RTX;
3743 /* Assignment of a structure component needs special treatment
3744 if the structure component's rtx is not simply a MEM.
3745 Assignment of an array element at a constant index, and assignment of
3746 an array element in an unaligned packed structure field, has the same
3747 problem. */
3749 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3750 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3751 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3753 enum machine_mode mode1;
3754 HOST_WIDE_INT bitsize, bitpos;
3755 rtx orig_to_rtx;
3756 tree offset;
3757 int unsignedp;
3758 int volatilep = 0;
3759 tree tem;
3761 push_temp_slots ();
3762 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3763 &unsignedp, &volatilep);
3765 /* If we are going to use store_bit_field and extract_bit_field,
3766 make sure to_rtx will be safe for multiple use. */
3768 if (mode1 == VOIDmode && want_value)
3769 tem = stabilize_reference (tem);
3771 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3773 if (offset != 0)
3775 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3777 if (GET_CODE (to_rtx) != MEM)
3778 abort ();
3780 #ifdef POINTERS_EXTEND_UNSIGNED
3781 if (GET_MODE (offset_rtx) != Pmode)
3782 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3783 #else
3784 if (GET_MODE (offset_rtx) != ptr_mode)
3785 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3786 #endif
3788 /* A constant address in TO_RTX can have VOIDmode; we must not try
3789 to call force_reg in that case, so avoid it. */
3790 if (GET_CODE (to_rtx) == MEM
3791 && GET_MODE (to_rtx) == BLKmode
3792 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3793 && bitsize > 0
3794 && (bitpos % bitsize) == 0
3795 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3796 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3798 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3799 bitpos = 0;
3802 to_rtx = offset_address (to_rtx, offset_rtx,
3803 highest_pow2_factor_for_target (to,
3804 offset));
3807 if (GET_CODE (to_rtx) == MEM)
3809 /* If the field is at offset zero, we could have been given the
3810 DECL_RTX of the parent struct. Don't munge it. */
3811 to_rtx = shallow_copy_rtx (to_rtx);
3813 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3816 /* Deal with volatile and readonly fields. The former is only done
3817 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3818 if (volatilep && GET_CODE (to_rtx) == MEM)
3820 if (to_rtx == orig_to_rtx)
3821 to_rtx = copy_rtx (to_rtx);
3822 MEM_VOLATILE_P (to_rtx) = 1;
3825 if (TREE_CODE (to) == COMPONENT_REF
3826 && TREE_READONLY (TREE_OPERAND (to, 1))
3827 /* We can't assert that a MEM won't be set more than once
3828 if the component is not addressable because another
3829 non-addressable component may be referenced by the same MEM. */
3830 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3832 if (to_rtx == orig_to_rtx)
3833 to_rtx = copy_rtx (to_rtx);
3834 RTX_UNCHANGING_P (to_rtx) = 1;
3837 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3839 if (to_rtx == orig_to_rtx)
3840 to_rtx = copy_rtx (to_rtx);
3841 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3844 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3845 (want_value
3846 /* Spurious cast for HPUX compiler. */
3847 ? ((enum machine_mode)
3848 TYPE_MODE (TREE_TYPE (to)))
3849 : VOIDmode),
3850 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3852 preserve_temp_slots (result);
3853 free_temp_slots ();
3854 pop_temp_slots ();
3856 /* If the value is meaningful, convert RESULT to the proper mode.
3857 Otherwise, return nothing. */
3858 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3859 TYPE_MODE (TREE_TYPE (from)),
3860 result,
3861 TREE_UNSIGNED (TREE_TYPE (to)))
3862 : NULL_RTX);
3865 /* If the rhs is a function call and its value is not an aggregate,
3866 call the function before we start to compute the lhs.
3867 This is needed for correct code for cases such as
3868 val = setjmp (buf) on machines where reference to val
3869 requires loading up part of an address in a separate insn.
3871 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3872 since it might be a promoted variable where the zero- or sign- extension
3873 needs to be done. Handling this in the normal way is safe because no
3874 computation is done before the call. */
3875 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3876 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3877 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3878 && GET_CODE (DECL_RTL (to)) == REG))
3880 rtx value;
3882 push_temp_slots ();
3883 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3884 if (to_rtx == 0)
3885 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3887 /* Handle calls that return values in multiple non-contiguous locations.
3888 The Irix 6 ABI has examples of this. */
3889 if (GET_CODE (to_rtx) == PARALLEL)
3890 emit_group_load (to_rtx, value, TREE_TYPE (from),
3891 int_size_in_bytes (TREE_TYPE (from)));
3892 else if (GET_MODE (to_rtx) == BLKmode)
3893 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3894 else
3896 if (POINTER_TYPE_P (TREE_TYPE (to)))
3897 value = convert_memory_address (GET_MODE (to_rtx), value);
3898 emit_move_insn (to_rtx, value);
3900 preserve_temp_slots (to_rtx);
3901 free_temp_slots ();
3902 pop_temp_slots ();
3903 return want_value ? to_rtx : NULL_RTX;
3906 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3907 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3909 if (to_rtx == 0)
3910 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3912 /* Don't move directly into a return register. */
3913 if (TREE_CODE (to) == RESULT_DECL
3914 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3916 rtx temp;
3918 push_temp_slots ();
3919 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3921 if (GET_CODE (to_rtx) == PARALLEL)
3922 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3923 int_size_in_bytes (TREE_TYPE (from)));
3924 else
3925 emit_move_insn (to_rtx, temp);
3927 preserve_temp_slots (to_rtx);
3928 free_temp_slots ();
3929 pop_temp_slots ();
3930 return want_value ? to_rtx : NULL_RTX;
3933 /* In case we are returning the contents of an object which overlaps
3934 the place the value is being stored, use a safe function when copying
3935 a value through a pointer into a structure value return block. */
3936 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3937 && current_function_returns_struct
3938 && !current_function_returns_pcc_struct)
3940 rtx from_rtx, size;
3942 push_temp_slots ();
3943 size = expr_size (from);
3944 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3946 if (TARGET_MEM_FUNCTIONS)
3947 emit_library_call (memmove_libfunc, LCT_NORMAL,
3948 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3949 XEXP (from_rtx, 0), Pmode,
3950 convert_to_mode (TYPE_MODE (sizetype),
3951 size, TREE_UNSIGNED (sizetype)),
3952 TYPE_MODE (sizetype));
3953 else
3954 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3955 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3956 XEXP (to_rtx, 0), Pmode,
3957 convert_to_mode (TYPE_MODE (integer_type_node),
3958 size,
3959 TREE_UNSIGNED (integer_type_node)),
3960 TYPE_MODE (integer_type_node));
3962 preserve_temp_slots (to_rtx);
3963 free_temp_slots ();
3964 pop_temp_slots ();
3965 return want_value ? to_rtx : NULL_RTX;
3968 /* Compute FROM and store the value in the rtx we got. */
3970 push_temp_slots ();
3971 result = store_expr (from, to_rtx, want_value);
3972 preserve_temp_slots (result);
3973 free_temp_slots ();
3974 pop_temp_slots ();
3975 return want_value ? result : NULL_RTX;
3978 /* Generate code for computing expression EXP,
3979 and storing the value into TARGET.
3980 TARGET may contain a QUEUED rtx.
3982 If WANT_VALUE & 1 is nonzero, return a copy of the value
3983 not in TARGET, so that we can be sure to use the proper
3984 value in a containing expression even if TARGET has something
3985 else stored in it. If possible, we copy the value through a pseudo
3986 and return that pseudo. Or, if the value is constant, we try to
3987 return the constant. In some cases, we return a pseudo
3988 copied *from* TARGET.
3990 If the mode is BLKmode then we may return TARGET itself.
3991 It turns out that in BLKmode it doesn't cause a problem,
3992 because C has no operators that could combine two different
3993 assignments into the same BLKmode object with different values
3994 with no sequence point. Will other languages need this to
3995 be more thorough?
3997 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3998 to catch quickly any cases where the caller uses the value
3999 and fails to set WANT_VALUE.
4001 If WANT_VALUE & 2 is set, this is a store into a call param on the
4002 stack, and block moves may need to be treated specially. */
4004 rtx
4005 store_expr (tree exp, rtx target, int want_value)
4007 rtx temp;
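/* Set by expand_expr_real below to the rtl (e.g. a DECL_RTL) that EXP
was actually expanded into, so we can tell later that TEMP and TARGET
are really the same location even if they compare unequal. */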
4008 rtx alt_rtl = NULL_RTX;
4009 int dont_return_target = 0;
4010 int dont_store_target = 0;
4012 if (VOID_TYPE_P (TREE_TYPE (exp)))
4014 /* C++ can generate ?: expressions with a throw expression in one
4015 branch and an rvalue in the other. Here, we resolve attempts to
4016 store the throw expression's nonexistent result. */
4017 if (want_value)
4018 abort ();
4019 expand_expr (exp, const0_rtx, VOIDmode, 0);
4020 return NULL_RTX;
4022 if (TREE_CODE (exp) == COMPOUND_EXPR)
4024 /* Perform first part of compound expression, then assign from second
4025 part. */
4026 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4027 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4028 emit_queue ();
4029 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4031 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4033 /* For conditional expression, get safe form of the target. Then
4034 test the condition, doing the appropriate assignment on either
4035 side. This avoids the creation of unnecessary temporaries.
4036 For non-BLKmode, it is more efficient not to do this. */
4038 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4040 emit_queue ();
4041 target = protect_from_queue (target, 1);
4043 do_pending_stack_adjust ();
4044 NO_DEFER_POP;
4045 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4046 start_cleanup_deferral ();
4047 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4048 end_cleanup_deferral ();
4049 emit_queue ();
4050 emit_jump_insn (gen_jump (lab2));
4051 emit_barrier ();
4052 emit_label (lab1);
4053 start_cleanup_deferral ();
4054 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4055 end_cleanup_deferral ();
4056 emit_queue ();
4057 emit_label (lab2);
4058 OK_DEFER_POP;
4060 return want_value & 1 ? target : NULL_RTX;
4062 else if (queued_subexp_p (target))
4063 /* If target contains a postincrement, let's not risk
4064 using it as the place to generate the rhs. */
4066 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4068 /* Expand EXP into a new pseudo. */
4069 temp = gen_reg_rtx (GET_MODE (target));
4070 temp = expand_expr (exp, temp, GET_MODE (target),
4071 (want_value & 2
4072 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4074 else
4075 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4076 (want_value & 2
4077 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4079 /* If target is volatile, ANSI requires accessing the value
4080 *from* the target, if it is accessed. So make that happen.
4081 In no case return the target itself. */
4082 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4083 dont_return_target = 1;
4085 else if ((want_value & 1) != 0
4086 && GET_CODE (target) == MEM
4087 && ! MEM_VOLATILE_P (target)
4088 && GET_MODE (target) != BLKmode)
4089 /* If target is in memory and caller wants value in a register instead,
4090 arrange that. Pass TARGET as target for expand_expr so that,
4091 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4092 We know expand_expr will not use the target in that case.
4093 Don't do this if TARGET is volatile because we are supposed
4094 to write it and then read it. */
4096 temp = expand_expr (exp, target, GET_MODE (target),
4097 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4098 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4100 /* If TEMP is already in the desired TARGET, only copy it from
4101 memory and don't store it there again. */
4102 if (temp == target
4103 || (rtx_equal_p (temp, target)
4104 && ! side_effects_p (temp) && ! side_effects_p (target)))
4105 dont_store_target = 1;
4106 temp = copy_to_reg (temp);
4108 dont_return_target = 1;
4110 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4111 /* If this is a scalar in a register that is stored in a wider mode
4112 than the declared mode, compute the result into its declared mode
4113 and then convert to the wider mode. Our value is the computed
4114 expression. */
4116 rtx inner_target = 0;
4118 /* If we don't want a value, we can do the conversion inside EXP,
4119 which will often result in some optimizations. Do the conversion
4120 in two steps: first change the signedness, if needed, then
4121 the extend. But don't do this if the type of EXP is a subtype
4122 of something else since then the conversion might involve
4123 more than just converting modes. */
4124 if ((want_value & 1) == 0
4125 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4126 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4128 if (TREE_UNSIGNED (TREE_TYPE (exp))
4129 != SUBREG_PROMOTED_UNSIGNED_P (target))
4130 exp = convert
4131 ((*lang_hooks.types.signed_or_unsigned_type)
4132 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4134 exp = convert ((*lang_hooks.types.type_for_mode)
4135 (GET_MODE (SUBREG_REG (target)),
4136 SUBREG_PROMOTED_UNSIGNED_P (target)),
4137 exp);
4139 inner_target = SUBREG_REG (target);
4142 temp = expand_expr (exp, inner_target, VOIDmode,
4143 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4145 /* If TEMP is a MEM and we want a result value, make the access
4146 now so it gets done only once. Strictly speaking, this is
4147 only necessary if the MEM is volatile, or if the address
4148 overlaps TARGET. But not performing the load twice also
4149 reduces the amount of rtl we generate and then have to CSE. */
4150 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4151 temp = copy_to_reg (temp);
4153 /* If TEMP is a VOIDmode constant, use convert_modes to make
4154 sure that we properly convert it. */
4155 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4157 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4158 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4159 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4160 GET_MODE (target), temp,
4161 SUBREG_PROMOTED_UNSIGNED_P (target));
4164 convert_move (SUBREG_REG (target), temp,
4165 SUBREG_PROMOTED_UNSIGNED_P (target));
4167 /* If we promoted a constant, change the mode back down to match
4168 target. Otherwise, the caller might get confused by a result whose
4169 mode is larger than expected. */
4171 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4173 if (GET_MODE (temp) != VOIDmode)
4175 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4176 SUBREG_PROMOTED_VAR_P (temp) = 1;
4177 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4178 SUBREG_PROMOTED_UNSIGNED_P (target));
4180 else
4181 temp = convert_modes (GET_MODE (target),
4182 GET_MODE (SUBREG_REG (target)),
4183 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4186 return want_value & 1 ? temp : NULL_RTX;
4188 else
4190 temp = expand_expr_real (exp, target, GET_MODE (target),
4191 (want_value & 2
4192 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4193 &alt_rtl);
4194 /* Return TARGET if it's a specified hardware register.
4195 If TARGET is a volatile mem ref, either return TARGET
4196 or return a reg copied *from* TARGET; ANSI requires this.
4198 Otherwise, if TEMP is not TARGET, return TEMP
4199 if it is constant (for efficiency),
4200 or if we really want the correct value. */
4201 if (!(target && GET_CODE (target) == REG
4202 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4203 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4204 && ! rtx_equal_p (temp, target)
4205 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4206 dont_return_target = 1;
4209 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4210 the same as that of TARGET, adjust the constant. This is needed, for
4211 example, in case it is a CONST_DOUBLE and we want only a word-sized
4212 value. */
4213 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4214 && TREE_CODE (exp) != ERROR_MARK
4215 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4216 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4217 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4219 /* If value was not generated in the target, store it there.
4220 Convert the value to TARGET's type first if necessary.
4221 If TEMP and TARGET compare equal according to rtx_equal_p, but
4222 one or both of them are volatile memory refs, we have to distinguish
4223 two cases:
4224 - expand_expr has used TARGET. In this case, we must not generate
4225 another copy. This can be detected by TARGET being equal according
4226 to == .
4227 - expand_expr has not used TARGET - that means that the source just
4228 happens to have the same RTX form. Since temp will have been created
4229 by expand_expr, it will compare unequal according to == .
4230 We must generate a copy in this case, to reach the correct number
4231 of volatile memory references. */
4233 if ((! rtx_equal_p (temp, target)
4234 || (temp != target && (side_effects_p (temp)
4235 || side_effects_p (target))))
4236 && TREE_CODE (exp) != ERROR_MARK
4237 && ! dont_store_target
4238 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4239 but TARGET is not a valid memory reference, TEMP will differ
4240 from TARGET although it is really the same location. */
4241 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4242 /* If there's nothing to copy, don't bother. Don't call expr_size
4243 unless necessary, because some front-ends' expr_size hook (e.g. C++'s)
4244 aborts on objects that are not supposed to be bit-copied or
4245 bit-initialized. */
4246 && expr_size (exp) != const0_rtx)
4248 target = protect_from_queue (target, 1);
4249 if (GET_MODE (temp) != GET_MODE (target)
4250 && GET_MODE (temp) != VOIDmode)
4252 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4253 if (dont_return_target)
4255 /* In this case, we will return TEMP,
4256 so make sure it has the proper mode.
4257 But don't forget to store the value into TARGET. */
4258 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4259 emit_move_insn (target, temp);
4261 else
4262 convert_move (target, temp, unsignedp);
4265 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4267 /* Handle copying a string constant into an array. The string
4268 constant may be shorter than the array. So copy just the string's
4269 actual length, and clear the rest. First get the size of the data
4270 type of the string, which is actually the size of the target. */
4271 rtx size = expr_size (exp);
4273 if (GET_CODE (size) == CONST_INT
4274 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4275 emit_block_move (target, temp, size,
4276 (want_value & 2
4277 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4278 else
4280 /* Compute the size of the data to copy from the string. */
4281 tree copy_size
4282 = size_binop (MIN_EXPR,
4283 make_tree (sizetype, size),
4284 size_int (TREE_STRING_LENGTH (exp)));
4285 rtx copy_size_rtx
4286 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4287 (want_value & 2
4288 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4289 rtx label = 0;
4291 /* Copy that much. */
4292 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4293 TREE_UNSIGNED (sizetype));
4294 emit_block_move (target, temp, copy_size_rtx,
4295 (want_value & 2
4296 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4298 /* Figure out how much is left in TARGET that we have to clear.
4299 Do all calculations in ptr_mode. */
4300 if (GET_CODE (copy_size_rtx) == CONST_INT)
4302 size = plus_constant (size, -INTVAL (copy_size_rtx));
4303 target = adjust_address (target, BLKmode,
4304 INTVAL (copy_size_rtx));
4306 else
4308 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4309 copy_size_rtx, NULL_RTX, 0,
4310 OPTAB_LIB_WIDEN);
4312 #ifdef POINTERS_EXTEND_UNSIGNED
4313 if (GET_MODE (copy_size_rtx) != Pmode)
4314 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4315 TREE_UNSIGNED (sizetype));
4316 #endif
4318 target = offset_address (target, copy_size_rtx,
4319 highest_pow2_factor (copy_size));
4320 label = gen_label_rtx ();
4321 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4322 GET_MODE (size), 0, label);
4325 if (size != const0_rtx)
4326 clear_storage (target, size);
4328 if (label)
4329 emit_label (label);
4332 /* Handle calls that return values in multiple non-contiguous locations.
4333 The Irix 6 ABI has examples of this. */
4334 else if (GET_CODE (target) == PARALLEL)
4335 emit_group_load (target, temp, TREE_TYPE (exp),
4336 int_size_in_bytes (TREE_TYPE (exp)));
4337 else if (GET_MODE (temp) == BLKmode)
4338 emit_block_move (target, temp, expr_size (exp),
4339 (want_value & 2
4340 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4341 else
4343 temp = force_operand (temp, target);
4344 if (temp != target)
4345 emit_move_insn (target, temp);
4349 /* If we don't want a value, return NULL_RTX. */
4350 if ((want_value & 1) == 0)
4351 return NULL_RTX;
4353 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4354 ??? The latter test doesn't seem to make sense. */
4355 else if (dont_return_target && GET_CODE (temp) != MEM)
4356 return temp;
4358 /* Return TARGET itself if it is a hard register. */
4359 else if ((want_value & 1) != 0
4360 && GET_MODE (target) != BLKmode
4361 && ! (GET_CODE (target) == REG
4362 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4363 return copy_to_reg (target);
4365 else
4366 return target;
4369 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4371 static int
4372 is_zeros_p (tree exp)
4374 tree elt;
4376 switch (TREE_CODE (exp))
4378 case CONVERT_EXPR:
4379 case NOP_EXPR:
4380 case NON_LVALUE_EXPR:
4381 case VIEW_CONVERT_EXPR:
4382 return is_zeros_p (TREE_OPERAND (exp, 0));
4384 case INTEGER_CST:
4385 return integer_zerop (exp);
4387 case COMPLEX_CST:
4388 return
4389 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4391 case REAL_CST:
4392 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4394 case VECTOR_CST:
4395 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4396 elt = TREE_CHAIN (elt))
4397 if (!is_zeros_p (TREE_VALUE (elt)))
4398 return 0;
4400 return 1;
4402 case CONSTRUCTOR:
4403 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4404 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4405 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4406 if (! is_zeros_p (TREE_VALUE (elt)))
4407 return 0;
4409 return 1;
4411 default:
4412 return 0;
4416 /* Return 1 if EXP contains mostly (3/4) zeros. */
4418 static int
4419 mostly_zeros_p (tree exp)
4421 if (TREE_CODE (exp) == CONSTRUCTOR)
4423 int elts = 0, zeros = 0;
4424 tree elt = CONSTRUCTOR_ELTS (exp);
4425 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4427 /* If there are no ranges of true bits, it is all zero. */
4428 return elt == NULL_TREE;
4430 for (; elt; elt = TREE_CHAIN (elt))
4432 /* We do not handle the case where the index is a RANGE_EXPR,
4433 so the statistic will be somewhat inaccurate.
4434 We do make a more accurate count in store_constructor itself,
4435 so, since this function is only used for nested array elements,
4436 this should be close enough. */
4437 if (mostly_zeros_p (TREE_VALUE (elt)))
4438 zeros++;
4439 elts++;
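/* Example: a constructor with 3 (mostly) zero elements out of 4
satisfies 4 * 3 >= 3 * 4, so it counts as mostly zeros. */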
4442 return 4 * zeros >= 3 * elts;
4445 return is_zeros_p (exp);
4448 /* Helper function for store_constructor.
4449 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4450 TYPE is the type of the CONSTRUCTOR, not the element type.
4451 CLEARED is as for store_constructor.
4452 ALIAS_SET is the alias set to use for any stores.
4454 This provides a recursive shortcut back to store_constructor when it isn't
4455 necessary to go through store_field. This is so that we can pass through
4456 the cleared field to let store_constructor know that we may not have to
4457 clear a substructure if the outer structure has already been cleared. */
4459 static void
4460 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4461 HOST_WIDE_INT bitpos, enum machine_mode mode,
4462 tree exp, tree type, int cleared, int alias_set)
4464 if (TREE_CODE (exp) == CONSTRUCTOR
4465 && bitpos % BITS_PER_UNIT == 0
4466 /* If we have a nonzero bitpos for a register target, then we just
4467 let store_field do the bitfield handling. This is unlikely to
4468 generate unnecessary clear instructions anyway. */
4469 && (bitpos == 0 || GET_CODE (target) == MEM))
4471 if (GET_CODE (target) == MEM)
4472 target
4473 = adjust_address (target,
4474 GET_MODE (target) == BLKmode
4475 || 0 != (bitpos
4476 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4477 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4480 /* Update the alias set, if required. */
4481 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4482 && MEM_ALIAS_SET (target) != 0)
4484 target = copy_rtx (target);
4485 set_mem_alias_set (target, alias_set);
4488 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4490 else
4491 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4492 alias_set);
4495 /* Store the value of constructor EXP into the rtx TARGET.
4496 TARGET is either a REG or a MEM; we know it cannot conflict, since
4497 safe_from_p has been called.
4498 CLEARED is true if TARGET is known to have been zero'd.
4499 SIZE is the number of bytes of TARGET we are allowed to modify: this
4500 may not be the same as the size of EXP if we are assigning to a field
4501 which has been packed to exclude padding bits. */
4503 static void
4504 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4506 tree type = TREE_TYPE (exp);
4507 #ifdef WORD_REGISTER_OPERATIONS
4508 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4509 #endif
4511 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4512 || TREE_CODE (type) == QUAL_UNION_TYPE)
4514 tree elt;
4516 /* If size is zero or the target is already cleared, do nothing. */
4517 if (size == 0 || cleared)
4518 cleared = 1;
4519 /* We either clear the aggregate or indicate the value is dead. */
4520 else if ((TREE_CODE (type) == UNION_TYPE
4521 || TREE_CODE (type) == QUAL_UNION_TYPE)
4522 && ! CONSTRUCTOR_ELTS (exp))
4523 /* If the constructor is empty, clear the union. */
4525 clear_storage (target, expr_size (exp));
4526 cleared = 1;
4529 /* If we are building a static constructor into a register,
4530 set the initial value as zero so we can fold the value into
4531 a constant. But if more than one register is involved,
4532 this probably loses. */
4533 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4534 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4536 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4537 cleared = 1;
4540 /* If the constructor has fewer fields than the structure
4541 or if we are initializing the structure to mostly zeros,
4542 clear the whole structure first. Don't do this if TARGET is a
4543 register whose mode size isn't equal to SIZE since clear_storage
4544 can't handle this case. */
4545 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4546 || mostly_zeros_p (exp))
4547 && (GET_CODE (target) != REG
4548 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4549 == size)))
4551 rtx xtarget = target;
4553 if (readonly_fields_p (type))
4555 xtarget = copy_rtx (xtarget);
4556 RTX_UNCHANGING_P (xtarget) = 1;
4559 clear_storage (xtarget, GEN_INT (size));
4560 cleared = 1;
4563 if (! cleared)
4564 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4566 /* Store each element of the constructor into
4567 the corresponding field of TARGET. */
4569 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4571 tree field = TREE_PURPOSE (elt);
4572 tree value = TREE_VALUE (elt);
4573 enum machine_mode mode;
4574 HOST_WIDE_INT bitsize;
4575 HOST_WIDE_INT bitpos = 0;
4576 tree offset;
4577 rtx to_rtx = target;
4579 /* Just ignore missing fields.
4580 We cleared the whole structure, above,
4581 if any fields are missing. */
4582 if (field == 0)
4583 continue;
4585 if (cleared && is_zeros_p (value))
4586 continue;
4588 if (host_integerp (DECL_SIZE (field), 1))
4589 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4590 else
4591 bitsize = -1;
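/* Passing VOIDmode as the mode below tells store_field that this is
a bit-field store. */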
4593 mode = DECL_MODE (field);
4594 if (DECL_BIT_FIELD (field))
4595 mode = VOIDmode;
4597 offset = DECL_FIELD_OFFSET (field);
4598 if (host_integerp (offset, 0)
4599 && host_integerp (bit_position (field), 0))
4601 bitpos = int_bit_position (field);
4602 offset = 0;
4604 else
4605 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4607 if (offset)
4609 rtx offset_rtx;
4611 if (CONTAINS_PLACEHOLDER_P (offset))
4612 offset = build (WITH_RECORD_EXPR, sizetype,
4613 offset, make_tree (TREE_TYPE (exp), target));
4615 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4616 if (GET_CODE (to_rtx) != MEM)
4617 abort ();
4619 #ifdef POINTERS_EXTEND_UNSIGNED
4620 if (GET_MODE (offset_rtx) != Pmode)
4621 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4622 #else
4623 if (GET_MODE (offset_rtx) != ptr_mode)
4624 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4625 #endif
4627 to_rtx = offset_address (to_rtx, offset_rtx,
4628 highest_pow2_factor (offset));
4631 if (TREE_READONLY (field))
4633 if (GET_CODE (to_rtx) == MEM)
4634 to_rtx = copy_rtx (to_rtx);
4636 RTX_UNCHANGING_P (to_rtx) = 1;
4639 #ifdef WORD_REGISTER_OPERATIONS
4640 /* If this initializes a field that is smaller than a word, at the
4641 start of a word, try to widen it to a full word.
4642 This special case allows us to output C++ member function
4643 initializations in a form that the optimizers can understand. */
4644 if (GET_CODE (target) == REG
4645 && bitsize < BITS_PER_WORD
4646 && bitpos % BITS_PER_WORD == 0
4647 && GET_MODE_CLASS (mode) == MODE_INT
4648 && TREE_CODE (value) == INTEGER_CST
4649 && exp_size >= 0
4650 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4652 tree type = TREE_TYPE (value);
4654 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4656 type = (*lang_hooks.types.type_for_size)
4657 (BITS_PER_WORD, TREE_UNSIGNED (type));
4658 value = convert (type, value);
4661 if (BYTES_BIG_ENDIAN)
4662 value
4663 = fold (build (LSHIFT_EXPR, type, value,
4664 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4665 bitsize = BITS_PER_WORD;
4666 mode = word_mode;
4668 #endif
4670 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4671 && DECL_NONADDRESSABLE_P (field))
4673 to_rtx = copy_rtx (to_rtx);
4674 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4677 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4678 value, type, cleared,
4679 get_alias_set (TREE_TYPE (field)));
4682 else if (TREE_CODE (type) == ARRAY_TYPE
4683 || TREE_CODE (type) == VECTOR_TYPE)
4685 tree elt;
4686 int i;
4687 int need_to_clear;
4688 tree domain = TYPE_DOMAIN (type);
4689 tree elttype = TREE_TYPE (type);
4690 int const_bounds_p;
4691 HOST_WIDE_INT minelt = 0;
4692 HOST_WIDE_INT maxelt = 0;
4693 int icode = 0;
4694 rtx *vector = NULL;
4695 int elt_size = 0;
4696 unsigned n_elts = 0;
4698 /* Vectors are like arrays, but the domain is stored via an array
4699 type indirectly. */
4700 if (TREE_CODE (type) == VECTOR_TYPE)
4702 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4703 the same field as TYPE_DOMAIN, we are not guaranteed that
4704 it always will. */
4705 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4706 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
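/* If the target is a vector register and the machine has a vec_init
pattern, collect the element rtxes in VECTOR (initially all zeros)
and emit a single vec_init insn at the end instead of storing the
elements one at a time. */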
4707 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4709 enum machine_mode mode = GET_MODE (target);
4711 icode = (int) vec_init_optab->handlers[mode].insn_code;
4712 if (icode != CODE_FOR_nothing)
4714 unsigned int i;
4716 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4717 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4718 vector = alloca (n_elts * sizeof (rtx));
4719 for (i = 0; i < n_elts; i++)
4720 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4725 const_bounds_p = (TYPE_MIN_VALUE (domain)
4726 && TYPE_MAX_VALUE (domain)
4727 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4728 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4730 /* If we have constant bounds for the range of the type, get them. */
4731 if (const_bounds_p)
4733 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4734 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4737 /* If the constructor has fewer elements than the array,
4738 clear the whole array first. Similarly if this is a
4739 static constructor of a non-BLKmode object. */
4740 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4741 need_to_clear = 1;
4742 else
4744 HOST_WIDE_INT count = 0, zero_count = 0;
4745 need_to_clear = ! const_bounds_p;
4747 /* This loop is a more accurate version of the loop in
4748 mostly_zeros_p (it handles RANGE_EXPR in an index).
4749 It is also needed to check for missing elements. */
4750 for (elt = CONSTRUCTOR_ELTS (exp);
4751 elt != NULL_TREE && ! need_to_clear;
4752 elt = TREE_CHAIN (elt))
4754 tree index = TREE_PURPOSE (elt);
4755 HOST_WIDE_INT this_node_count;
4757 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4759 tree lo_index = TREE_OPERAND (index, 0);
4760 tree hi_index = TREE_OPERAND (index, 1);
4762 if (! host_integerp (lo_index, 1)
4763 || ! host_integerp (hi_index, 1))
4765 need_to_clear = 1;
4766 break;
4769 this_node_count = (tree_low_cst (hi_index, 1)
4770 - tree_low_cst (lo_index, 1) + 1);
4772 else
4773 this_node_count = 1;
4775 count += this_node_count;
4776 if (mostly_zeros_p (TREE_VALUE (elt)))
4777 zero_count += this_node_count;
4780 /* Clear the entire array first if there are any missing elements,
4781 or if the incidence of zero elements is >= 75%. */
4782 if (! need_to_clear
4783 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4784 need_to_clear = 1;
4787 if (need_to_clear && size > 0 && !vector)
4789 if (! cleared)
4791 if (REG_P (target))
4792 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4793 else
4794 clear_storage (target, GEN_INT (size));
4796 cleared = 1;
4798 else if (REG_P (target))
4799 /* Inform later passes that the old value is dead. */
4800 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4802 /* Store each element of the constructor into
4803 the corresponding element of TARGET, determined
4804 by counting the elements. */
4805 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4806 elt;
4807 elt = TREE_CHAIN (elt), i++)
4809 enum machine_mode mode;
4810 HOST_WIDE_INT bitsize;
4811 HOST_WIDE_INT bitpos;
4812 int unsignedp;
4813 tree value = TREE_VALUE (elt);
4814 tree index = TREE_PURPOSE (elt);
4815 rtx xtarget = target;
4817 if (cleared && is_zeros_p (value))
4818 continue;
4820 unsignedp = TREE_UNSIGNED (elttype);
4821 mode = TYPE_MODE (elttype);
4822 if (mode == BLKmode)
4823 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4824 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4825 : -1);
4826 else
4827 bitsize = GET_MODE_BITSIZE (mode);
4829 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4831 tree lo_index = TREE_OPERAND (index, 0);
4832 tree hi_index = TREE_OPERAND (index, 1);
4833 rtx index_r, pos_rtx, loop_end;
4834 struct nesting *loop;
4835 HOST_WIDE_INT lo, hi, count;
4836 tree position;
4838 if (vector)
4839 abort ();
4841 /* If the range is constant and "small", unroll the loop. */
4842 if (const_bounds_p
4843 && host_integerp (lo_index, 0)
4844 && host_integerp (hi_index, 0)
4845 && (lo = tree_low_cst (lo_index, 0),
4846 hi = tree_low_cst (hi_index, 0),
4847 count = hi - lo + 1,
4848 (GET_CODE (target) != MEM
4849 || count <= 2
4850 || (host_integerp (TYPE_SIZE (elttype), 1)
4851 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4852 <= 40 * 8)))))
4854 lo -= minelt; hi -= minelt;
4855 for (; lo <= hi; lo++)
4857 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4859 if (GET_CODE (target) == MEM
4860 && !MEM_KEEP_ALIAS_SET_P (target)
4861 && TREE_CODE (type) == ARRAY_TYPE
4862 && TYPE_NONALIASED_COMPONENT (type))
4864 target = copy_rtx (target);
4865 MEM_KEEP_ALIAS_SET_P (target) = 1;
4868 store_constructor_field
4869 (target, bitsize, bitpos, mode, value, type, cleared,
4870 get_alias_set (elttype));
4873 else
4875 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4876 loop_end = gen_label_rtx ();
4878 unsignedp = TREE_UNSIGNED (domain);
4880 index = build_decl (VAR_DECL, NULL_TREE, domain);
4882 index_r
4883 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4884 &unsignedp, 0));
4885 SET_DECL_RTL (index, index_r);
4886 if (TREE_CODE (value) == SAVE_EXPR
4887 && SAVE_EXPR_RTL (value) == 0)
4889 /* Make sure value gets expanded once before the
4890 loop. */
4891 expand_expr (value, const0_rtx, VOIDmode, 0);
4892 emit_queue ();
4894 store_expr (lo_index, index_r, 0);
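/* Generate a run-time loop: INDEX runs from LO_INDEX to HI_INDEX,
and VALUE is stored into the element at each position. */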
4895 loop = expand_start_loop (0);
4897 /* Assign value to element index. */
4898 position
4899 = convert (ssizetype,
4900 fold (build (MINUS_EXPR, TREE_TYPE (index),
4901 index, TYPE_MIN_VALUE (domain))));
4902 position = size_binop (MULT_EXPR, position,
4903 convert (ssizetype,
4904 TYPE_SIZE_UNIT (elttype)));
4906 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4907 xtarget = offset_address (target, pos_rtx,
4908 highest_pow2_factor (position));
4909 xtarget = adjust_address (xtarget, mode, 0);
4910 if (TREE_CODE (value) == CONSTRUCTOR)
4911 store_constructor (value, xtarget, cleared,
4912 bitsize / BITS_PER_UNIT);
4913 else
4914 store_expr (value, xtarget, 0);
4916 expand_exit_loop_if_false (loop,
4917 build (LT_EXPR, integer_type_node,
4918 index, hi_index));
4920 expand_increment (build (PREINCREMENT_EXPR,
4921 TREE_TYPE (index),
4922 index, integer_one_node), 0, 0);
4923 expand_end_loop ();
4924 emit_label (loop_end);
4927 else if ((index != 0 && ! host_integerp (index, 0))
4928 || ! host_integerp (TYPE_SIZE (elttype), 1))
4930 tree position;
4932 if (vector)
4933 abort ();
4935 if (index == 0)
4936 index = ssize_int (1);
4938 if (minelt)
4939 index = convert (ssizetype,
4940 fold (build (MINUS_EXPR, index,
4941 TYPE_MIN_VALUE (domain))));
4943 position = size_binop (MULT_EXPR, index,
4944 convert (ssizetype,
4945 TYPE_SIZE_UNIT (elttype)));
4946 xtarget = offset_address (target,
4947 expand_expr (position, 0, VOIDmode, 0),
4948 highest_pow2_factor (position));
4949 xtarget = adjust_address (xtarget, mode, 0);
4950 store_expr (value, xtarget, 0);
4952 else if (vector)
4954 int pos;
4956 if (index != 0)
4957 pos = tree_low_cst (index, 0) - minelt;
4958 else
4959 pos = i;
4960 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4962 else
4964 if (index != 0)
4965 bitpos = ((tree_low_cst (index, 0) - minelt)
4966 * tree_low_cst (TYPE_SIZE (elttype), 1));
4967 else
4968 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4970 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4971 && TREE_CODE (type) == ARRAY_TYPE
4972 && TYPE_NONALIASED_COMPONENT (type))
4974 target = copy_rtx (target);
4975 MEM_KEEP_ALIAS_SET_P (target) = 1;
4977 store_constructor_field (target, bitsize, bitpos, mode, value,
4978 type, cleared, get_alias_set (elttype));
4981 if (vector)
4983 emit_insn (GEN_FCN (icode) (target,
4984 gen_rtx_PARALLEL (GET_MODE (target),
4985 gen_rtvec_v (n_elts, vector))));
4989 /* Set constructor assignments. */
4990 else if (TREE_CODE (type) == SET_TYPE)
4992 tree elt = CONSTRUCTOR_ELTS (exp);
4993 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4994 tree domain = TYPE_DOMAIN (type);
4995 tree domain_min, domain_max, bitlength;
4997 /* The default implementation strategy is to extract the constant
4998 parts of the constructor, use that to initialize the target,
4999 and then "or" in whatever non-constant ranges we need in addition.
5001 If a large set is all zero or all ones, it is
5002 probably better to set it using memset (if available) or bzero.
5003 Also, if a large set has just a single range, it may be
5004 better to first clear the whole set (using bzero/memset)
5005 and then set the bits we want. */
5007 /* Check for all zeros. */
5008 if (elt == NULL_TREE && size > 0)
5010 if (!cleared)
5011 clear_storage (target, GEN_INT (size));
5012 return;
5015 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5016 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5017 bitlength = size_binop (PLUS_EXPR,
5018 size_diffop (domain_max, domain_min),
5019 ssize_int (1));
5021 nbits = tree_low_cst (bitlength, 1);
5023 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5024 are "complicated" (more than one range), initialize (the
5025 constant parts) by copying from a constant. */
5026 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5027 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5029 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5030 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5031 char *bit_buffer = alloca (nbits);
5032 HOST_WIDE_INT word = 0;
5033 unsigned int bit_pos = 0;
5034 unsigned int ibit = 0;
5035 unsigned int offset = 0; /* In bytes from beginning of set. */
5037 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
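/* BIT_BUFFER now holds one char per bit of the set. Pack the bits
into words of SET_WORD_SIZE bits and store each completed word
into the target at the current byte OFFSET. */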
5038 for (;;)
5040 if (bit_buffer[ibit])
5042 if (BYTES_BIG_ENDIAN)
5043 word |= (1 << (set_word_size - 1 - bit_pos));
5044 else
5045 word |= 1 << bit_pos;
5048 bit_pos++; ibit++;
5049 if (bit_pos >= set_word_size || ibit == nbits)
5051 if (word != 0 || ! cleared)
5053 rtx datum = GEN_INT (word);
5054 rtx to_rtx;
5056 /* The assumption here is that it is safe to use
5057 XEXP if the set is multi-word, but not if
5058 it's single-word. */
5059 if (GET_CODE (target) == MEM)
5060 to_rtx = adjust_address (target, mode, offset);
5061 else if (offset == 0)
5062 to_rtx = target;
5063 else
5064 abort ();
5065 emit_move_insn (to_rtx, datum);
5068 if (ibit == nbits)
5069 break;
5070 word = 0;
5071 bit_pos = 0;
5072 offset += set_word_size / BITS_PER_UNIT;
5076 else if (!cleared)
5077 /* Don't bother clearing storage if the set is all ones. */
5078 if (TREE_CHAIN (elt) != NULL_TREE
5079 || (TREE_PURPOSE (elt) == NULL_TREE
5080 ? nbits != 1
5081 : ( ! host_integerp (TREE_VALUE (elt), 0)
5082 || ! host_integerp (TREE_PURPOSE (elt), 0)
5083 || (tree_low_cst (TREE_VALUE (elt), 0)
5084 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5085 != (HOST_WIDE_INT) nbits))))
5086 clear_storage (target, expr_size (exp));
5088 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5090 /* Start of range of element or NULL. */
5091 tree startbit = TREE_PURPOSE (elt);
5092 /* End of range of element, or element value. */
5093 tree endbit = TREE_VALUE (elt);
5094 HOST_WIDE_INT startb, endb;
5095 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5097 bitlength_rtx = expand_expr (bitlength,
5098 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5100 /* Handle non-range tuple element like [ expr ]. */
5101 if (startbit == NULL_TREE)
5103 startbit = save_expr (endbit);
5104 endbit = startbit;
5107 startbit = convert (sizetype, startbit);
5108 endbit = convert (sizetype, endbit);
5109 if (! integer_zerop (domain_min))
5111 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5112 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5114 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5115 EXPAND_CONST_ADDRESS);
5116 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5117 EXPAND_CONST_ADDRESS);
5119 if (REG_P (target))
5121 targetx
5122 = assign_temp
5123 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5124 (GET_MODE (target), 0),
5125 TYPE_QUAL_CONST)),
5126 0, 1, 1);
5127 emit_move_insn (targetx, target);
5130 else if (GET_CODE (target) == MEM)
5131 targetx = target;
5132 else
5133 abort ();
5135 /* Optimization: If startbit and endbit are constants divisible
5136 by BITS_PER_UNIT, call memset instead. */
5137 if (TARGET_MEM_FUNCTIONS
5138 && TREE_CODE (startbit) == INTEGER_CST
5139 && TREE_CODE (endbit) == INTEGER_CST
5140 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5141 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5143 emit_library_call (memset_libfunc, LCT_NORMAL,
5144 VOIDmode, 3,
5145 plus_constant (XEXP (targetx, 0),
5146 startb / BITS_PER_UNIT),
5147 Pmode,
5148 constm1_rtx, TYPE_MODE (integer_type_node),
5149 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5150 TYPE_MODE (sizetype));
5152 else
5153 emit_library_call (setbits_libfunc, LCT_NORMAL,
5154 VOIDmode, 4, XEXP (targetx, 0),
5155 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5156 startbit_rtx, TYPE_MODE (sizetype),
5157 endbit_rtx, TYPE_MODE (sizetype));
5159 if (REG_P (target))
5160 emit_move_insn (target, targetx);
5164 else
5165 abort ();
5168 /* Store the value of EXP (an expression tree)
5169 into a subfield of TARGET which has mode MODE and occupies
5170 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5171 If MODE is VOIDmode, it means that we are storing into a bit-field.
5173 If VALUE_MODE is VOIDmode, return nothing in particular.
5174 UNSIGNEDP is not used in this case.
5176 Otherwise, return an rtx for the value stored. This rtx
5177 has mode VALUE_MODE if that is convenient to do.
5178 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5180 TYPE is the type of the underlying object.
5182 ALIAS_SET is the alias set for the destination. This value will
5183 (in general) be different from that for TARGET, since TARGET is a
5184 reference to the containing structure. */
5186 static rtx
5187 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5188 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5189 int unsignedp, tree type, int alias_set)
5191 HOST_WIDE_INT width_mask = 0;
5193 if (TREE_CODE (exp) == ERROR_MARK)
5194 return const0_rtx;
5196 /* If we have nothing to store, do nothing unless the expression has
5197 side-effects. */
5198 if (bitsize == 0)
5199 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5200 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5201 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
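/* When BITSIZE fits in a host word, WIDTH_MASK has the low BITSIZE
bits set; it is used below to mask the stored value instead of
refetching it from the bit-field. */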
5203 /* If we are storing into an unaligned field of an aligned union that is
5204 in a register, we may have the mode of TARGET being an integer mode but
5205 MODE == BLKmode. In that case, get an aligned object whose size and
5206 alignment are the same as TARGET and store TARGET into it (we can avoid
5207 the store if the field being stored is the entire width of TARGET). Then
5208 call ourselves recursively to store the field into a BLKmode version of
5209 that object. Finally, load from the object into TARGET. This is not
5210 very efficient in general, but should only be slightly more expensive
5211 than the otherwise-required unaligned accesses. Perhaps this can be
5212 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5213 twice, once with emit_move_insn and once via store_field. */
5215 if (mode == BLKmode
5216 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5218 rtx object = assign_temp (type, 0, 1, 1);
5219 rtx blk_object = adjust_address (object, BLKmode, 0);
5221 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5222 emit_move_insn (object, target);
5224 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5225 alias_set);
5227 emit_move_insn (target, object);
5229 /* We want to return the BLKmode version of the data. */
5230 return blk_object;
5233 if (GET_CODE (target) == CONCAT)
5235 /* We're storing into a struct containing a single __complex. */
5237 if (bitpos != 0)
5238 abort ();
5239 return store_expr (exp, target, 0);
5242 /* If the structure is in a register or if the component
5243 is a bit field, we cannot use addressing to access it.
5244 Use bit-field techniques or SUBREG to store in it. */
5246 if (mode == VOIDmode
5247 || (mode != BLKmode && ! direct_store[(int) mode]
5248 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5249 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5250 || GET_CODE (target) == REG
5251 || GET_CODE (target) == SUBREG
5252 /* If the field isn't aligned enough to store as an ordinary memref,
5253 store it as a bit field. */
5254 || (mode != BLKmode
5255 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5256 || bitpos % GET_MODE_ALIGNMENT (mode))
5257 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5258 || (bitpos % BITS_PER_UNIT != 0)))
5259 /* If the RHS and field are a constant size and the size of the
5260 RHS isn't the same size as the bitfield, we must use bitfield
5261 operations. */
5262 || (bitsize >= 0
5263 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5264 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5266 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5268 /* If BITSIZE is narrower than the size of the type of EXP
5269 we will be narrowing TEMP. Normally, what's wanted are the
5270 low-order bits. However, if EXP's type is a record and this is
5271 a big-endian machine, we want the upper BITSIZE bits.
5272 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5273 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5274 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5275 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5276 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5277 - bitsize),
5278 NULL_RTX, 1);
5280 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5281 MODE. */
5282 if (mode != VOIDmode && mode != BLKmode
5283 && mode != TYPE_MODE (TREE_TYPE (exp)))
5284 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5286 /* If the modes of TARGET and TEMP are both BLKmode, both
5287 must be in memory and BITPOS must be aligned on a byte
5288 boundary. If so, we simply do a block copy. */
5289 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5291 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5292 || bitpos % BITS_PER_UNIT != 0)
5293 abort ();
5295 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5296 emit_block_move (target, temp,
5297 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5298 / BITS_PER_UNIT),
5299 BLOCK_OP_NORMAL);
5301 return value_mode == VOIDmode ? const0_rtx : target;
5304 /* Store the value in the bitfield. */
5305 store_bit_field (target, bitsize, bitpos, mode, temp,
5306 int_size_in_bytes (type));
5308 if (value_mode != VOIDmode)
5310 /* The caller wants an rtx for the value.
5311 If possible, avoid refetching from the bitfield itself. */
5312 if (width_mask != 0
5313 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5315 tree count;
5316 enum machine_mode tmode;
5318 tmode = GET_MODE (temp);
5319 if (tmode == VOIDmode)
5320 tmode = value_mode;
5322 if (unsignedp)
5323 return expand_and (tmode, temp,
5324 gen_int_mode (width_mask, tmode),
5325 NULL_RTX);
5327 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5328 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5329 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5332 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5333 NULL_RTX, value_mode, VOIDmode,
5334 int_size_in_bytes (type));
5336 return const0_rtx;
5338 else
5340 rtx addr = XEXP (target, 0);
5341 rtx to_rtx = target;
5343 /* If a value is wanted, it must be the lhs;
5344 so make the address stable for multiple use. */
5346 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5347 && ! CONSTANT_ADDRESS_P (addr)
5348 /* A frame-pointer reference is already stable. */
5349 && ! (GET_CODE (addr) == PLUS
5350 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5351 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5352 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5353 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5355 /* Now build a reference to just the desired component. */
5357 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5359 if (to_rtx == target)
5360 to_rtx = copy_rtx (to_rtx);
5362 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5363 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5364 set_mem_alias_set (to_rtx, alias_set);
5366 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5370 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5371 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5372 codes and find the ultimate containing object, which we return.
5374 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5375 bit position, and *PUNSIGNEDP to the signedness of the field.
5376 If the position of the field is variable, we store a tree
5377 giving the variable offset (in units) in *POFFSET.
5378 This offset is in addition to the bit position.
5379 If the position is not variable, we store 0 in *POFFSET.
5381 If any of the extraction expressions is volatile,
5382 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5384 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5385 is a mode that can be used to access the field. In that case, *PBITSIZE
5386 is redundant.
5388 If the field describes a variable-sized object, *PMODE is set to
5389 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5390 this case, but the address of the object can be found. */
5392 tree
5393 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5394 HOST_WIDE_INT *pbitpos, tree *poffset,
5395 enum machine_mode *pmode, int *punsignedp,
5396 int *pvolatilep)
5398 tree size_tree = 0;
5399 enum machine_mode mode = VOIDmode;
5400 tree offset = size_zero_node;
5401 tree bit_offset = bitsize_zero_node;
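/* OFFSET accumulates the variable part of the position in bytes;
BIT_OFFSET accumulates the constant part in bits. They are
combined into *POFFSET and *PBITPOS at the end. */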
5402 tree placeholder_ptr = 0;
5403 tree tem;
5405 /* First get the mode, signedness, and size. We do this from just the
5406 outermost expression. */
5407 if (TREE_CODE (exp) == COMPONENT_REF)
5409 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5410 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5411 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5413 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5415 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5417 size_tree = TREE_OPERAND (exp, 1);
5418 *punsignedp = TREE_UNSIGNED (exp);
5420 else
5422 mode = TYPE_MODE (TREE_TYPE (exp));
5423 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5425 if (mode == BLKmode)
5426 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5427 else
5428 *pbitsize = GET_MODE_BITSIZE (mode);
5431 if (size_tree != 0)
5433 if (! host_integerp (size_tree, 1))
5434 mode = BLKmode, *pbitsize = -1;
5435 else
5436 *pbitsize = tree_low_cst (size_tree, 1);
5439 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5440 and find the ultimate containing object. */
5441 while (1)
5443 if (TREE_CODE (exp) == BIT_FIELD_REF)
5444 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5445 else if (TREE_CODE (exp) == COMPONENT_REF)
5447 tree field = TREE_OPERAND (exp, 1);
5448 tree this_offset = DECL_FIELD_OFFSET (field);
5450 /* If this field hasn't been filled in yet, don't go
5451 past it. This should only happen when folding expressions
5452 made during type construction. */
5453 if (this_offset == 0)
5454 break;
5455 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5456 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5458 offset = size_binop (PLUS_EXPR, offset, this_offset);
5459 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5460 DECL_FIELD_BIT_OFFSET (field));
5462 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5465 else if (TREE_CODE (exp) == ARRAY_REF
5466 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5468 tree index = TREE_OPERAND (exp, 1);
5469 tree array = TREE_OPERAND (exp, 0);
5470 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5471 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5472 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5474 /* We assume all arrays have sizes that are a multiple of a byte.
5475 First subtract the lower bound, if any, in the type of the
5476 index, then convert to sizetype and multiply by the size of the
5477 array element. */
5478 if (low_bound != 0 && ! integer_zerop (low_bound))
5479 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5480 index, low_bound));
5482 /* If the index has a self-referential type, pass it to a
5483 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5484 component to one. */
5485 if (CONTAINS_PLACEHOLDER_P (index))
5486 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5487 if (CONTAINS_PLACEHOLDER_P (unit_size))
5488 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5490 offset = size_binop (PLUS_EXPR, offset,
5491 size_binop (MULT_EXPR,
5492 convert (sizetype, index),
5493 unit_size));
5496 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5498 tree new = find_placeholder (exp, &placeholder_ptr);
5500 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5501 We might have been called from tree optimization where we
5502 haven't set up an object yet. */
5503 if (new == 0)
5504 break;
5505 else
5506 exp = new;
5508 continue;
5511 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5512 conversions that don't change the mode, and all view conversions
5513 except those that need to "step up" the alignment. */
5514 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5515 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5516 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5517 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5518 && STRICT_ALIGNMENT
5519 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5520 < BIGGEST_ALIGNMENT)
5521 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5522 || TYPE_ALIGN_OK (TREE_TYPE
5523 (TREE_OPERAND (exp, 0))))))
5524 && ! ((TREE_CODE (exp) == NOP_EXPR
5525 || TREE_CODE (exp) == CONVERT_EXPR)
5526 && (TYPE_MODE (TREE_TYPE (exp))
5527 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5528 break;
5530 /* If any reference in the chain is volatile, the effect is volatile. */
5531 if (TREE_THIS_VOLATILE (exp))
5532 *pvolatilep = 1;
5534 exp = TREE_OPERAND (exp, 0);
5537 /* If OFFSET is constant, see if we can return the whole thing as a
5538 constant bit position. Otherwise, split it up. */
5539 if (host_integerp (offset, 0)
5540 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5541 bitsize_unit_node))
5542 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5543 && host_integerp (tem, 0))
5544 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5545 else
5546 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5548 *pmode = mode;
5549 return exp;
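/* For illustration (a sketch, not taken from the original sources): a
   typical caller decomposes a reference much as the normal_inner_ref case
   of expand_expr_real does further below, where EXP is some COMPONENT_REF
   or ARRAY_REF it already holds:

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                      &mode1, &unsignedp, &volatilep);

   BASE is then expanded to an rtx and BITPOS/OFFSET are applied to it to
   form the final memory reference.  */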
5552 /* Return 1 if T is an expression that get_inner_reference handles. */
5555 handled_component_p (tree t)
5557 switch (TREE_CODE (t))
5559 case BIT_FIELD_REF:
5560 case COMPONENT_REF:
5561 case ARRAY_REF:
5562 case ARRAY_RANGE_REF:
5563 case NON_LVALUE_EXPR:
5564 case VIEW_CONVERT_EXPR:
5565 return 1;
5567 /* ??? Sure they are handled, but get_inner_reference may return
5568 a different PBITSIZE, depending upon whether the expression is
5569 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5570 case NOP_EXPR:
5571 case CONVERT_EXPR:
5572 return (TYPE_MODE (TREE_TYPE (t))
5573 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5575 default:
5576 return 0;
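/* For illustration (not in the original comments): a COMPONENT_REF such as
   OBJ.FLD, an ARRAY_REF such as A[I], or a BIT_FIELD_REF makes
   handled_component_p return 1, while a bare VAR_DECL or an arithmetic
   expression returns 0.  */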
5580 /* Given an rtx VALUE that may contain additions and multiplications, return
5581 an equivalent value that just refers to a register, memory, or constant.
5582 This is done by generating instructions to perform the arithmetic and
5583 returning a pseudo-register containing the value.
5585 The returned value may be a REG, SUBREG, MEM or constant. */
5588 force_operand (rtx value, rtx target)
5590 rtx op1, op2;
5591 /* Use subtarget as the target for operand 0 of a binary operation. */
5592 rtx subtarget = get_subtarget (target);
5593 enum rtx_code code = GET_CODE (value);
5595 /* Check for subreg applied to an expression produced by loop optimizer. */
5596 if (code == SUBREG
5597 && GET_CODE (SUBREG_REG (value)) != REG
5598 && GET_CODE (SUBREG_REG (value)) != MEM)
5600 value = simplify_gen_subreg (GET_MODE (value),
5601 force_reg (GET_MODE (SUBREG_REG (value)),
5602 force_operand (SUBREG_REG (value),
5603 NULL_RTX)),
5604 GET_MODE (SUBREG_REG (value)),
5605 SUBREG_BYTE (value));
5606 code = GET_CODE (value);
5609 /* Check for a PIC address load. */
5610 if ((code == PLUS || code == MINUS)
5611 && XEXP (value, 0) == pic_offset_table_rtx
5612 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5613 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5614 || GET_CODE (XEXP (value, 1)) == CONST))
5616 if (!subtarget)
5617 subtarget = gen_reg_rtx (GET_MODE (value));
5618 emit_move_insn (subtarget, value);
5619 return subtarget;
5622 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5624 if (!target)
5625 target = gen_reg_rtx (GET_MODE (value));
5626 convert_move (target, force_operand (XEXP (value, 0), NULL),
5627 code == ZERO_EXTEND);
5628 return target;
5631 if (ARITHMETIC_P (value))
5633 op2 = XEXP (value, 1);
5634 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5635 subtarget = 0;
5636 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5638 code = PLUS;
5639 op2 = negate_rtx (GET_MODE (value), op2);
5642 /* Check for an addition with OP2 a constant integer and our first
5643 operand a PLUS of a virtual register and something else. In that
5644 case, we want to emit the sum of the virtual register and the
5645 constant first and then add the other value. This allows virtual
5646 register instantiation to simply modify the constant rather than
5647 creating another one around this addition. */
5648 if (code == PLUS && GET_CODE (op2) == CONST_INT
5649 && GET_CODE (XEXP (value, 0)) == PLUS
5650 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5651 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5652 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5654 rtx temp = expand_simple_binop (GET_MODE (value), code,
5655 XEXP (XEXP (value, 0), 0), op2,
5656 subtarget, 0, OPTAB_LIB_WIDEN);
5657 return expand_simple_binop (GET_MODE (value), code, temp,
5658 force_operand (XEXP (XEXP (value,
5659 0), 1), 0),
5660 target, 0, OPTAB_LIB_WIDEN);
5663 op1 = force_operand (XEXP (value, 0), subtarget);
5664 op2 = force_operand (op2, NULL_RTX);
5665 switch (code)
5667 case MULT:
5668 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5669 case DIV:
5670 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5671 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5672 target, 1, OPTAB_LIB_WIDEN);
5673 else
5674 return expand_divmod (0,
5675 FLOAT_MODE_P (GET_MODE (value))
5676 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5677 GET_MODE (value), op1, op2, target, 0);
5678 break;
5679 case MOD:
5680 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5681 target, 0);
5682 break;
5683 case UDIV:
5684 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5685 target, 1);
5686 break;
5687 case UMOD:
5688 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5689 target, 1);
5690 break;
5691 case ASHIFTRT:
5692 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5693 target, 0, OPTAB_LIB_WIDEN);
5694 break;
5695 default:
5696 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5697 target, 1, OPTAB_LIB_WIDEN);
5700 if (UNARY_P (value))
5702 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5703 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5706 #ifdef INSN_SCHEDULING
5707 /* On machines that have insn scheduling, we want all memory references to be
5708 explicit, so we need to deal with such paradoxical SUBREGs. */
5709 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5710 && (GET_MODE_SIZE (GET_MODE (value))
5711 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5712 value
5713 = simplify_gen_subreg (GET_MODE (value),
5714 force_reg (GET_MODE (SUBREG_REG (value)),
5715 force_operand (SUBREG_REG (value),
5716 NULL_RTX)),
5717 GET_MODE (SUBREG_REG (value)),
5718 SUBREG_BYTE (value));
5719 #endif
5721 return value;
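/* For illustration (a sketch, not taken from the original sources): given a
   symbolic address such as (plus (reg X) (mult (reg Y) (const_int 4))),

     rtx addr = force_operand (value, NULL_RTX);

   emits the multiply and add insns and returns a pseudo register holding
   the sum; a VALUE that is already a REG, MEM or constant is returned
   unchanged.  */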
5724 /* Subroutine of expand_expr: return nonzero iff there is no way that
5725 EXP can reference X, which is being modified. TOP_P is nonzero if this
5726 call is going to be used to determine whether we need a temporary
5727 for EXP, as opposed to a recursive call to this function.
5729 It is always safe for this routine to return zero since it merely
5730 searches for optimization opportunities. */
5733 safe_from_p (rtx x, tree exp, int top_p)
5735 rtx exp_rtl = 0;
5736 int i, nops;
5737 static tree save_expr_list;
5739 if (x == 0
5740 /* If EXP has varying size, we MUST use a target since we currently
5741 have no way of allocating temporaries of variable size
5742 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5743 So we assume here that something at a higher level has prevented a
5744 clash. This is somewhat bogus, but the best we can do. Only
5745 do this when X is BLKmode and when we are at the top level. */
5746 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5747 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5748 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5749 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5750 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5751 != INTEGER_CST)
5752 && GET_MODE (x) == BLKmode)
5753 /* If X is in the outgoing argument area, it is always safe. */
5754 || (GET_CODE (x) == MEM
5755 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5756 || (GET_CODE (XEXP (x, 0)) == PLUS
5757 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5758 return 1;
5760 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5761 find the underlying pseudo. */
5762 if (GET_CODE (x) == SUBREG)
5764 x = SUBREG_REG (x);
5765 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5766 return 0;
5769 /* A SAVE_EXPR might appear many times in the expression passed to the
5770 top-level safe_from_p call, and if it has a complex subexpression,
5771 examining it multiple times could result in a combinatorial explosion.
5772 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5773 with optimization took about 28 minutes to compile -- even though it was
5774 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5775 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5776 we have processed. Note that the only test of top_p was above. */
5778 if (top_p)
5780 int rtn;
5781 tree t;
5783 save_expr_list = 0;
5785 rtn = safe_from_p (x, exp, 0);
5787 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5788 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5790 return rtn;
5793 /* Now look at our tree code and possibly recurse. */
5794 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5796 case 'd':
5797 exp_rtl = DECL_RTL_IF_SET (exp);
5798 break;
5800 case 'c':
5801 return 1;
5803 case 'x':
5804 if (TREE_CODE (exp) == TREE_LIST)
5806 while (1)
5808 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5809 return 0;
5810 exp = TREE_CHAIN (exp);
5811 if (!exp)
5812 return 1;
5813 if (TREE_CODE (exp) != TREE_LIST)
5814 return safe_from_p (x, exp, 0);
5817 else if (TREE_CODE (exp) == ERROR_MARK)
5818 return 1; /* An already-visited SAVE_EXPR? */
5819 else
5820 return 0;
5822 case '2':
5823 case '<':
5824 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5825 return 0;
5826 /* Fall through. */
5828 case '1':
5829 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5831 case 'e':
5832 case 'r':
5833 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5834 the expression. If it is set, we conflict iff we are that rtx or
5835 both are in memory. Otherwise, we check all operands of the
5836 expression recursively. */
5838 switch (TREE_CODE (exp))
5840 case ADDR_EXPR:
5841 /* If the operand is static or we are static, we can't conflict.
5842 Likewise if we don't conflict with the operand at all. */
5843 if (staticp (TREE_OPERAND (exp, 0))
5844 || TREE_STATIC (exp)
5845 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5846 return 1;
5848 /* Otherwise, the only way this can conflict is if we are taking
5849 the address of a DECL whose address is part of X, which is
5850 very rare. */
5851 exp = TREE_OPERAND (exp, 0);
5852 if (DECL_P (exp))
5854 if (!DECL_RTL_SET_P (exp)
5855 || GET_CODE (DECL_RTL (exp)) != MEM)
5856 return 0;
5857 else
5858 exp_rtl = XEXP (DECL_RTL (exp), 0);
5860 break;
5862 case INDIRECT_REF:
5863 if (GET_CODE (x) == MEM
5864 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5865 get_alias_set (exp)))
5866 return 0;
5867 break;
5869 case CALL_EXPR:
5870 /* Assume that the call will clobber all hard registers and
5871 all of memory. */
5872 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5873 || GET_CODE (x) == MEM)
5874 return 0;
5875 break;
5877 case RTL_EXPR:
5878 /* If a sequence exists, we would have to scan every instruction
5879 in the sequence to see if it was safe. This is probably not
5880 worthwhile. */
5881 if (RTL_EXPR_SEQUENCE (exp))
5882 return 0;
5884 exp_rtl = RTL_EXPR_RTL (exp);
5885 break;
5887 case WITH_CLEANUP_EXPR:
5888 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5889 break;
5891 case CLEANUP_POINT_EXPR:
5892 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5894 case SAVE_EXPR:
5895 exp_rtl = SAVE_EXPR_RTL (exp);
5896 if (exp_rtl)
5897 break;
5899 /* If we've already scanned this, don't do it again. Otherwise,
5900 show we've scanned it and record for clearing the flag if we're
5901 going on. */
5902 if (TREE_PRIVATE (exp))
5903 return 1;
5905 TREE_PRIVATE (exp) = 1;
5906 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5908 TREE_PRIVATE (exp) = 0;
5909 return 0;
5912 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5913 return 1;
5915 case BIND_EXPR:
5916 /* The only operand we look at is operand 1. The rest aren't
5917 part of the expression. */
5918 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5920 default:
5921 break;
5924 /* If we have an rtx, we do not need to scan our operands. */
5925 if (exp_rtl)
5926 break;
5928 nops = first_rtl_op (TREE_CODE (exp));
5929 for (i = 0; i < nops; i++)
5930 if (TREE_OPERAND (exp, i) != 0
5931 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5932 return 0;
5934 /* If this is a language-specific tree code, it may require
5935 special handling. */
5936 if ((unsigned int) TREE_CODE (exp)
5937 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5938 && !(*lang_hooks.safe_from_p) (x, exp))
5939 return 0;
5942 /* If we have an rtl, find any enclosed object. Then see if we conflict
5943 with it. */
5944 if (exp_rtl)
5946 if (GET_CODE (exp_rtl) == SUBREG)
5948 exp_rtl = SUBREG_REG (exp_rtl);
5949 if (GET_CODE (exp_rtl) == REG
5950 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5951 return 0;
5954 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5955 are memory and they conflict. */
5956 return ! (rtx_equal_p (x, exp_rtl)
5957 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5958 && true_dependence (exp_rtl, VOIDmode, x,
5959 rtx_addr_varies_p)));
5962 /* If we reach here, it is safe. */
5963 return 1;
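/* For illustration (a sketch, not taken from the original sources): the
   usual pattern is to test a proposed target before reusing it for a
   subexpression,

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   exactly as expand_operands does below; a zero answer merely forces a
   fresh temporary and never causes wrong code.  */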
5966 /* Subroutine of expand_expr: return rtx if EXP is a
5967 variable or parameter; else return 0. */
5969 static rtx
5970 var_rtx (tree exp)
5972 STRIP_NOPS (exp);
5973 switch (TREE_CODE (exp))
5975 case PARM_DECL:
5976 case VAR_DECL:
5977 return DECL_RTL (exp);
5978 default:
5979 return 0;
5983 /* Return the highest power of two that EXP is known to be a multiple of.
5984 This is used in updating alignment of MEMs in array references. */
5986 static unsigned HOST_WIDE_INT
5987 highest_pow2_factor (tree exp)
5989 unsigned HOST_WIDE_INT c0, c1;
5991 switch (TREE_CODE (exp))
5993 case INTEGER_CST:
5994 /* We can find the lowest bit that's a one. If the low
5995 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5996 We need to handle this case since we can find it in a COND_EXPR,
5997 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5998 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5999 later ICE. */
6000 if (TREE_CONSTANT_OVERFLOW (exp))
6001 return BIGGEST_ALIGNMENT;
6002 else
6004 /* Note: tree_low_cst is intentionally not used here;
6005 we don't care about the upper bits. */
6006 c0 = TREE_INT_CST_LOW (exp);
6007 c0 &= -c0;
6008 return c0 ? c0 : BIGGEST_ALIGNMENT;
6010 break;
6012 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6013 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6014 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6015 return MIN (c0, c1);
6017 case MULT_EXPR:
6018 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6019 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6020 return c0 * c1;
6022 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6023 case CEIL_DIV_EXPR:
6024 if (integer_pow2p (TREE_OPERAND (exp, 1))
6025 && host_integerp (TREE_OPERAND (exp, 1), 1))
6027 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6028 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6029 return MAX (1, c0 / c1);
6031 break;
6033 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6034 case SAVE_EXPR: case WITH_RECORD_EXPR:
6035 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6037 case COMPOUND_EXPR:
6038 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6040 case COND_EXPR:
6041 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6042 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6043 return MIN (c0, c1);
6045 default:
6046 break;
6049 return 1;
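/* Worked example (editorial, not in the original comments): for the tree
   expression I * 12 + 8, the MULT_EXPR case gives 1 * 4 = 4 (the factor of
   I defaults to 1, and 4 is the largest power of two dividing 12), and the
   PLUS_EXPR case then returns MIN (4, 8) = 4, so a MEM addressed by this
   expression can be assumed 4-byte aligned relative to its base.  */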
6052 /* Similar, except that the alignment requirements of TARGET are
6053 taken into account. Assume it is at least as aligned as its
6054 type, unless it is a COMPONENT_REF in which case the layout of
6055 the structure gives the alignment. */
6057 static unsigned HOST_WIDE_INT
6058 highest_pow2_factor_for_target (tree target, tree exp)
6060 unsigned HOST_WIDE_INT target_align, factor;
6062 factor = highest_pow2_factor (exp);
6063 if (TREE_CODE (target) == COMPONENT_REF)
6064 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6065 else
6066 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6067 return MAX (factor, target_align);
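/* For illustration (editorial, not in the original comments): storing
   through a COMPONENT_REF whose FIELD_DECL has DECL_ALIGN of 64 bits gives
   target_align == 8 bytes, so even an offset whose known factor is only 4
   still yields MAX (4, 8) == 8 here.  */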
6070 /* Return an object on the placeholder list that matches EXP, a
6071 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6072 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6073 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6074 points to a starting location in the placeholder list (zero meaning the
6075 start of the list); on return, a pointer to the placeholder-list entry
6076 at which the object was found is stored there. */
6078 tree
6079 find_placeholder (tree exp, tree *plist)
6081 tree type = TREE_TYPE (exp);
6082 tree placeholder_expr;
6084 for (placeholder_expr
6085 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6086 placeholder_expr != 0;
6087 placeholder_expr = TREE_CHAIN (placeholder_expr))
6089 tree need_type = TYPE_MAIN_VARIANT (type);
6090 tree elt;
6092 /* Find the outermost reference that is of the type we want. If none,
6093 see if any object has a type that is a pointer to the type we
6094 want. */
6095 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6096 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6097 || TREE_CODE (elt) == COND_EXPR)
6098 ? TREE_OPERAND (elt, 1)
6099 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6100 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6101 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6102 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6103 ? TREE_OPERAND (elt, 0) : 0))
6104 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6106 if (plist)
6107 *plist = placeholder_expr;
6108 return elt;
6111 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6112 elt
6113 = ((TREE_CODE (elt) == COMPOUND_EXPR
6114 || TREE_CODE (elt) == COND_EXPR)
6115 ? TREE_OPERAND (elt, 1)
6116 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6117 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6118 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6119 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6120 ? TREE_OPERAND (elt, 0) : 0))
6121 if (POINTER_TYPE_P (TREE_TYPE (elt))
6122 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6123 == need_type))
6125 if (plist)
6126 *plist = placeholder_expr;
6127 return build1 (INDIRECT_REF, need_type, elt);
6131 return 0;
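/* For illustration (a sketch, not taken from the original sources):
   expanding a WITH_RECORD_EXPR (see that case of expand_expr_real below)
   pushes its record object onto placeholder_list, so that a later

     tree plist = 0;
     tree object = find_placeholder (exp, &plist);

   for a PLACEHOLDER_EXPR EXP of that record type returns the pushed object
   (or an INDIRECT_REF of it), letting self-referential sizes and offsets
   be evaluated.  */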
6134 /* Subroutine of expand_expr. Expand the two operands of a binary
6135 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6136 The value may be stored in TARGET if TARGET is nonzero. The
6137 MODIFIER argument is as documented by expand_expr. */
6139 static void
6140 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6141 enum expand_modifier modifier)
6143 if (! safe_from_p (target, exp1, 1))
6144 target = 0;
6145 if (operand_equal_p (exp0, exp1, 0))
6147 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6148 *op1 = copy_rtx (*op0);
6150 else
6152 /* If we need to preserve evaluation order, copy exp0 into its own
6153 temporary variable so that it can't be clobbered by exp1. */
6154 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6155 exp0 = save_expr (exp0);
6156 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6157 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
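/* For illustration (a sketch, not taken from the original sources): a
   binary-operator case of expand_expr_real would typically fetch both
   operands with

     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                      subtarget, &op0, &op1, EXPAND_NORMAL);

   leaving the expanded rtx values in OP0 and OP1 ready for the optab
   call.  */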
6162 /* expand_expr: generate code for computing expression EXP.
6163 An rtx for the computed value is returned. The value is never null.
6164 In the case of a void EXP, const0_rtx is returned.
6166 The value may be stored in TARGET if TARGET is nonzero.
6167 TARGET is just a suggestion; callers must assume that
6168 the rtx returned may not be the same as TARGET.
6170 If TARGET is CONST0_RTX, it means that the value will be ignored.
6172 If TMODE is not VOIDmode, it suggests generating the
6173 result in mode TMODE. But this is done only when convenient.
6174 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6175 TMODE is just a suggestion; callers must assume that
6176 the rtx returned may not have mode TMODE.
6178 Note that TARGET may have neither TMODE nor MODE. In that case, it
6179 probably will not be used.
6181 If MODIFIER is EXPAND_SUM then when EXP is an addition
6182 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6183 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6184 products as above, or REG or MEM, or constant.
6185 Ordinarily in such cases we would output mul or add instructions
6186 and then return a pseudo reg containing the sum.
6188 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6189 it also marks a label as absolutely required (it can't be dead).
6190 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6191 This is used for outputting expressions used in initializers.
6193 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6194 with a constant address even if that address is not normally legitimate.
6195 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6197 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6198 a call parameter. Such targets require special care as we haven't yet
6199 marked TARGET so that it's safe from being trashed by libcalls. We
6200 don't want to use TARGET for anything but the final result;
6201 intermediate values must go elsewhere. Additionally, calls to
6202 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6204 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6205 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6206 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6207 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6208 recursively. */
6211 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6212 enum expand_modifier modifier, rtx *alt_rtl)
6214 rtx op0, op1, temp;
6215 tree type = TREE_TYPE (exp);
6216 int unsignedp = TREE_UNSIGNED (type);
6217 enum machine_mode mode;
6218 enum tree_code code = TREE_CODE (exp);
6219 optab this_optab;
6220 rtx subtarget, original_target;
6221 int ignore;
6222 tree context;
6224 /* Handle ERROR_MARK before anybody tries to access its type. */
6225 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6227 op0 = CONST0_RTX (tmode);
6228 if (op0 != 0)
6229 return op0;
6230 return const0_rtx;
6233 mode = TYPE_MODE (type);
6234 /* Use subtarget as the target for operand 0 of a binary operation. */
6235 subtarget = get_subtarget (target);
6236 original_target = target;
6237 ignore = (target == const0_rtx
6238 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6239 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6240 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6241 && TREE_CODE (type) == VOID_TYPE));
6243 /* If we are going to ignore this result, we need only do something
6244 if there is a side-effect somewhere in the expression. If there
6245 is, short-circuit the most common cases here. Note that we must
6246 not call expand_expr with anything but const0_rtx in case this
6247 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6249 if (ignore)
6251 if (! TREE_SIDE_EFFECTS (exp))
6252 return const0_rtx;
6254 /* Ensure we reference a volatile object even if value is ignored, but
6255 don't do this if all we are doing is taking its address. */
6256 if (TREE_THIS_VOLATILE (exp)
6257 && TREE_CODE (exp) != FUNCTION_DECL
6258 && mode != VOIDmode && mode != BLKmode
6259 && modifier != EXPAND_CONST_ADDRESS)
6261 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6262 if (GET_CODE (temp) == MEM)
6263 temp = copy_to_reg (temp);
6264 return const0_rtx;
6267 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6268 || code == INDIRECT_REF || code == BUFFER_REF)
6269 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6270 modifier);
6272 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6273 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6275 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6276 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6277 return const0_rtx;
6279 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6280 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6281 /* If the second operand has no side effects, just evaluate
6282 the first. */
6283 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6284 modifier);
6285 else if (code == BIT_FIELD_REF)
6287 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6288 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6289 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6290 return const0_rtx;
6293 target = 0;
6296 /* If we will do cse, generate all results into pseudo registers
6297 since 1) that allows cse to find more things
6298 and 2) otherwise cse could produce an insn the machine
6299 cannot support. An exception is a CONSTRUCTOR into a multi-word
6300 MEM: that's much more likely to be most efficient into the MEM.
6301 Another is a CALL_EXPR which must return in memory. */
6303 if (! cse_not_expected && mode != BLKmode && target
6304 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6305 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6306 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6307 target = 0;
6309 switch (code)
6311 case LABEL_DECL:
6313 tree function = decl_function_context (exp);
6314 /* Labels in containing functions, or labels used from initializers,
6315 must be forced. */
6316 if (modifier == EXPAND_INITIALIZER
6317 || (function != current_function_decl
6318 && function != inline_function_decl
6319 && function != 0))
6320 temp = force_label_rtx (exp);
6321 else
6322 temp = label_rtx (exp);
6324 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6325 if (function != current_function_decl
6326 && function != inline_function_decl && function != 0)
6327 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6328 return temp;
6331 case PARM_DECL:
6332 if (!DECL_RTL_SET_P (exp))
6334 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6335 return CONST0_RTX (mode);
6338 /* ... fall through ... */
6340 case VAR_DECL:
6341 /* If a static var's type was incomplete when the decl was written,
6342 but the type is complete now, lay out the decl now. */
6343 if (DECL_SIZE (exp) == 0
6344 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6345 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6346 layout_decl (exp, 0);
6348 /* ... fall through ... */
6350 case FUNCTION_DECL:
6351 case RESULT_DECL:
6352 if (DECL_RTL (exp) == 0)
6353 abort ();
6355 /* Ensure the variable is marked as used even if it doesn't go through
6356 a parser. If it hasn't been used yet, write out an external
6357 definition. */
6358 if (! TREE_USED (exp))
6360 assemble_external (exp);
6361 TREE_USED (exp) = 1;
6364 /* Show we haven't gotten RTL for this yet. */
6365 temp = 0;
6367 /* Handle variables inherited from containing functions. */
6368 context = decl_function_context (exp);
6370 /* We treat inline_function_decl as an alias for the current function
6371 because that is the inline function whose vars, types, etc.
6372 are being merged into the current function.
6373 See expand_inline_function. */
6375 if (context != 0 && context != current_function_decl
6376 && context != inline_function_decl
6377 /* If var is static, we don't need a static chain to access it. */
6378 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6379 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6381 rtx addr;
6383 /* Mark as non-local and addressable. */
6384 DECL_NONLOCAL (exp) = 1;
6385 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6386 abort ();
6387 (*lang_hooks.mark_addressable) (exp);
6388 if (GET_CODE (DECL_RTL (exp)) != MEM)
6389 abort ();
6390 addr = XEXP (DECL_RTL (exp), 0);
6391 if (GET_CODE (addr) == MEM)
6392 addr
6393 = replace_equiv_address (addr,
6394 fix_lexical_addr (XEXP (addr, 0), exp));
6395 else
6396 addr = fix_lexical_addr (addr, exp);
6398 temp = replace_equiv_address (DECL_RTL (exp), addr);
6401 /* This is the case of an array whose size is to be determined
6402 from its initializer, while the initializer is still being parsed.
6403 See expand_decl. */
6405 else if (GET_CODE (DECL_RTL (exp)) == MEM
6406 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6407 temp = validize_mem (DECL_RTL (exp));
6409 /* If DECL_RTL is memory, we are in the normal case: if either
6410 the address is not valid, or it is not a register and -fforce-addr
6411 is specified, get the address into a register. */
6413 else if (GET_CODE (DECL_RTL (exp)) == MEM
6414 && modifier != EXPAND_CONST_ADDRESS
6415 && modifier != EXPAND_SUM
6416 && modifier != EXPAND_INITIALIZER
6417 && (! memory_address_p (DECL_MODE (exp),
6418 XEXP (DECL_RTL (exp), 0))
6419 || (flag_force_addr
6420 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6422 if (alt_rtl)
6423 *alt_rtl = DECL_RTL (exp);
6424 temp = replace_equiv_address (DECL_RTL (exp),
6425 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6428 /* If we got something, return it. But first, set the alignment
6429 if the address is a register. */
6430 if (temp != 0)
6432 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6433 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6435 return temp;
6438 /* If the mode of DECL_RTL does not match that of the decl, it
6439 must be a promoted value. We return a SUBREG of the wanted mode,
6440 but mark it so that we know that it was already extended. */
6442 if (GET_CODE (DECL_RTL (exp)) == REG
6443 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6445 /* Get the signedness used for this variable. Ensure we get the
6446 same mode we got when the variable was declared. */
6447 if (GET_MODE (DECL_RTL (exp))
6448 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6449 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6450 abort ();
6452 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6453 SUBREG_PROMOTED_VAR_P (temp) = 1;
6454 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6455 return temp;
6458 return DECL_RTL (exp);
6460 case INTEGER_CST:
6461 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6462 TREE_INT_CST_HIGH (exp), mode);
6464 /* ??? If overflow is set, fold will have done an incomplete job,
6465 which can result in (plus xx (const_int 0)), which can get
6466 simplified by validate_replace_rtx during virtual register
6467 instantiation, which can result in unrecognizable insns.
6468 Avoid this by forcing all overflows into registers. */
6469 if (TREE_CONSTANT_OVERFLOW (exp)
6470 && modifier != EXPAND_INITIALIZER)
6471 temp = force_reg (mode, temp);
6473 return temp;
6475 case VECTOR_CST:
6476 return const_vector_from_tree (exp);
6478 case CONST_DECL:
6479 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6481 case REAL_CST:
6482 /* If optimized, generate immediate CONST_DOUBLE
6483 which will be turned into memory by reload if necessary.
6485 We used to force a register so that loop.c could see it. But
6486 this does not allow gen_* patterns to perform optimizations with
6487 the constants. It also produces two insns in cases like "x = 1.0;".
6488 On most machines, floating-point constants are not permitted in
6489 many insns, so we'd end up copying it to a register in any case.
6491 Now, we do the copying in expand_binop, if appropriate. */
6492 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6493 TYPE_MODE (TREE_TYPE (exp)));
6495 case COMPLEX_CST:
6496 /* Handle evaluating a complex constant in a CONCAT target. */
6497 if (original_target && GET_CODE (original_target) == CONCAT)
6499 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6500 rtx rtarg, itarg;
6502 rtarg = XEXP (original_target, 0);
6503 itarg = XEXP (original_target, 1);
6505 /* Move the real and imaginary parts separately. */
6506 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6507 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6509 if (op0 != rtarg)
6510 emit_move_insn (rtarg, op0);
6511 if (op1 != itarg)
6512 emit_move_insn (itarg, op1);
6514 return original_target;
6517 /* ... fall through ... */
6519 case STRING_CST:
6520 temp = output_constant_def (exp, 1);
6522 /* temp contains a constant address.
6523 On RISC machines where a constant address isn't valid,
6524 make some insns to get that address into a register. */
6525 if (modifier != EXPAND_CONST_ADDRESS
6526 && modifier != EXPAND_INITIALIZER
6527 && modifier != EXPAND_SUM
6528 && (! memory_address_p (mode, XEXP (temp, 0))
6529 || flag_force_addr))
6530 return replace_equiv_address (temp,
6531 copy_rtx (XEXP (temp, 0)));
6532 return temp;
6534 case EXPR_WITH_FILE_LOCATION:
6536 rtx to_return;
6537 struct file_stack fs;
6539 fs.location = input_location;
6540 fs.next = expr_wfl_stack;
6541 input_filename = EXPR_WFL_FILENAME (exp);
6542 input_line = EXPR_WFL_LINENO (exp);
6543 expr_wfl_stack = &fs;
6544 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6545 emit_line_note (input_location);
6546 /* Possibly avoid switching back and forth here. */
6547 to_return = expand_expr (EXPR_WFL_NODE (exp),
6548 (ignore ? const0_rtx : target),
6549 tmode, modifier);
6550 if (expr_wfl_stack != &fs)
6551 abort ();
6552 input_location = fs.location;
6553 expr_wfl_stack = fs.next;
6554 return to_return;
6557 case SAVE_EXPR:
6558 context = decl_function_context (exp);
6560 /* If this SAVE_EXPR was at global context, assume we are an
6561 initialization function and move it into our context. */
6562 if (context == 0)
6563 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6565 /* We treat inline_function_decl as an alias for the current function
6566 because that is the inline function whose vars, types, etc.
6567 are being merged into the current function.
6568 See expand_inline_function. */
6569 if (context == current_function_decl || context == inline_function_decl)
6570 context = 0;
6572 /* If this is non-local, handle it. */
6573 if (context)
6575 /* The following call just exists to abort if the context is
6576 not of a containing function. */
6577 find_function_data (context);
6579 temp = SAVE_EXPR_RTL (exp);
6580 if (temp && GET_CODE (temp) == REG)
6582 put_var_into_stack (exp, /*rescan=*/true);
6583 temp = SAVE_EXPR_RTL (exp);
6585 if (temp == 0 || GET_CODE (temp) != MEM)
6586 abort ();
6587 return
6588 replace_equiv_address (temp,
6589 fix_lexical_addr (XEXP (temp, 0), exp));
6591 if (SAVE_EXPR_RTL (exp) == 0)
6593 if (mode == VOIDmode)
6594 temp = const0_rtx;
6595 else
6596 temp = assign_temp (build_qualified_type (type,
6597 (TYPE_QUALS (type)
6598 | TYPE_QUAL_CONST)),
6599 3, 0, 0);
6601 SAVE_EXPR_RTL (exp) = temp;
6602 if (!optimize && GET_CODE (temp) == REG)
6603 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6604 save_expr_regs);
6606 /* If the mode of TEMP does not match that of the expression, it
6607 must be a promoted value. We pass store_expr a SUBREG of the
6608 wanted mode but mark it so that we know that it was already
6609 extended. */
6611 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6613 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6614 promote_mode (type, mode, &unsignedp, 0);
6615 SUBREG_PROMOTED_VAR_P (temp) = 1;
6616 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6619 if (temp == const0_rtx)
6620 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6621 else
6622 store_expr (TREE_OPERAND (exp, 0), temp,
6623 modifier == EXPAND_STACK_PARM ? 2 : 0);
6625 TREE_USED (exp) = 1;
6628 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6629 must be a promoted value. We return a SUBREG of the wanted mode,
6630 but mark it so that we know that it was already extended. */
6632 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6633 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6635 /* Compute the signedness and make the proper SUBREG. */
6636 promote_mode (type, mode, &unsignedp, 0);
6637 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6638 SUBREG_PROMOTED_VAR_P (temp) = 1;
6639 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6640 return temp;
6643 return SAVE_EXPR_RTL (exp);
6645 case UNSAVE_EXPR:
6647 rtx temp;
6648 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6649 TREE_OPERAND (exp, 0)
6650 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6651 return temp;
6654 case PLACEHOLDER_EXPR:
6656 tree old_list = placeholder_list;
6657 tree placeholder_expr = 0;
6659 exp = find_placeholder (exp, &placeholder_expr);
6660 if (exp == 0)
6661 abort ();
6663 placeholder_list = TREE_CHAIN (placeholder_expr);
6664 temp = expand_expr (exp, original_target, tmode, modifier);
6665 placeholder_list = old_list;
6666 return temp;
6669 case WITH_RECORD_EXPR:
6670 /* Put the object on the placeholder list, expand our first operand,
6671 and pop the list. */
6672 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6673 placeholder_list);
6674 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6675 modifier);
6676 placeholder_list = TREE_CHAIN (placeholder_list);
6677 return target;
6679 case GOTO_EXPR:
6680 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6681 expand_goto (TREE_OPERAND (exp, 0));
6682 else
6683 expand_computed_goto (TREE_OPERAND (exp, 0));
6684 return const0_rtx;
6686 case EXIT_EXPR:
6687 expand_exit_loop_if_false (NULL,
6688 invert_truthvalue (TREE_OPERAND (exp, 0)));
6689 return const0_rtx;
6691 case LABELED_BLOCK_EXPR:
6692 if (LABELED_BLOCK_BODY (exp))
6693 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6694 /* Should perhaps use expand_label, but this is simpler and safer. */
6695 do_pending_stack_adjust ();
6696 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6697 return const0_rtx;
6699 case EXIT_BLOCK_EXPR:
6700 if (EXIT_BLOCK_RETURN (exp))
6701 sorry ("returned value in block_exit_expr");
6702 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6703 return const0_rtx;
6705 case LOOP_EXPR:
6706 push_temp_slots ();
6707 expand_start_loop (1);
6708 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6709 expand_end_loop ();
6710 pop_temp_slots ();
6712 return const0_rtx;
6714 case BIND_EXPR:
6716 tree vars = TREE_OPERAND (exp, 0);
6718 /* Need to open a binding contour here because
6719 if there are any cleanups they must be contained here. */
6720 expand_start_bindings (2);
6722 /* Mark the corresponding BLOCK for output in its proper place. */
6723 if (TREE_OPERAND (exp, 2) != 0
6724 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6725 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6727 /* If VARS have not yet been expanded, expand them now. */
6728 while (vars)
6730 if (!DECL_RTL_SET_P (vars))
6731 expand_decl (vars);
6732 expand_decl_init (vars);
6733 vars = TREE_CHAIN (vars);
6736 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6738 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6740 return temp;
6743 case RTL_EXPR:
6744 if (RTL_EXPR_SEQUENCE (exp))
6746 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6747 abort ();
6748 emit_insn (RTL_EXPR_SEQUENCE (exp));
6749 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6751 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6752 free_temps_for_rtl_expr (exp);
6753 if (alt_rtl)
6754 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6755 return RTL_EXPR_RTL (exp);
6757 case CONSTRUCTOR:
6758 /* If we don't need the result, just ensure we evaluate any
6759 subexpressions. */
6760 if (ignore)
6762 tree elt;
6764 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6765 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6767 return const0_rtx;
6770 /* All elts simple constants => refer to a constant in memory. But
6771 if this is a non-BLKmode mode, let it store a field at a time
6772 since that should make a CONST_INT or CONST_DOUBLE when we
6773 fold. Likewise, if we have a target we can use, it is best to
6774 store directly into the target unless the type is large enough
6775 that memcpy will be used. If we are making an initializer and
6776 all operands are constant, put it in memory as well.
6778 FIXME: Avoid trying to fill vector constructors piece-meal.
6779 Output them with output_constant_def below unless we're sure
6780 they're zeros. This should go away when vector initializers
6781 are treated like VECTOR_CST instead of arrays. */
6783 else if ((TREE_STATIC (exp)
6784 && ((mode == BLKmode
6785 && ! (target != 0 && safe_from_p (target, exp, 1)))
6786 || TREE_ADDRESSABLE (exp)
6787 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6788 && (! MOVE_BY_PIECES_P
6789 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6790 TYPE_ALIGN (type)))
6791 && ((TREE_CODE (type) == VECTOR_TYPE
6792 && !is_zeros_p (exp))
6793 || ! mostly_zeros_p (exp)))))
6794 || ((modifier == EXPAND_INITIALIZER
6795 || modifier == EXPAND_CONST_ADDRESS)
6796 && TREE_CONSTANT (exp)))
6798 rtx constructor = output_constant_def (exp, 1);
6800 if (modifier != EXPAND_CONST_ADDRESS
6801 && modifier != EXPAND_INITIALIZER
6802 && modifier != EXPAND_SUM)
6803 constructor = validize_mem (constructor);
6805 return constructor;
6807 else
6809 /* Handle calls that pass values in multiple non-contiguous
6810 locations. The Irix 6 ABI has examples of this. */
6811 if (target == 0 || ! safe_from_p (target, exp, 1)
6812 || GET_CODE (target) == PARALLEL
6813 || modifier == EXPAND_STACK_PARM)
6814 target
6815 = assign_temp (build_qualified_type (type,
6816 (TYPE_QUALS (type)
6817 | (TREE_READONLY (exp)
6818 * TYPE_QUAL_CONST))),
6819 0, TREE_ADDRESSABLE (exp), 1);
6821 store_constructor (exp, target, 0, int_expr_size (exp));
6822 return target;
6825 case INDIRECT_REF:
6827 tree exp1 = TREE_OPERAND (exp, 0);
6828 tree index;
6829 tree string = string_constant (exp1, &index);
6831 /* Try to optimize reads from const strings. */
6832 if (string
6833 && TREE_CODE (string) == STRING_CST
6834 && TREE_CODE (index) == INTEGER_CST
6835 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6836 && GET_MODE_CLASS (mode) == MODE_INT
6837 && GET_MODE_SIZE (mode) == 1
6838 && modifier != EXPAND_WRITE)
6839 return gen_int_mode (TREE_STRING_POINTER (string)
6840 [TREE_INT_CST_LOW (index)], mode);
6842 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6843 op0 = memory_address (mode, op0);
6844 temp = gen_rtx_MEM (mode, op0);
6845 set_mem_attributes (temp, exp, 0);
6847 /* If we are writing to this object and its type is a record with
6848 readonly fields, we must mark it as readonly so it will
6849 conflict with readonly references to those fields. */
6850 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6851 RTX_UNCHANGING_P (temp) = 1;
6853 return temp;
6856 case ARRAY_REF:
6857 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6858 abort ();
6861 tree array = TREE_OPERAND (exp, 0);
6862 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6863 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6864 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6865 HOST_WIDE_INT i;
6867 /* Optimize the special-case of a zero lower bound.
6869 We convert the low_bound to sizetype to avoid some problems
6870 with constant folding. (E.g. suppose the lower bound is 1,
6871 and its mode is QI. Without the conversion, (ARRAY
6872 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6873 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6875 if (! integer_zerop (low_bound))
6876 index = size_diffop (index, convert (sizetype, low_bound));
6878 /* Fold an expression like: "foo"[2].
6879 This is not done in fold so it won't happen inside &.
6880 Don't fold if this is for wide characters since it's too
6881 difficult to do correctly and this is a very rare case. */
6883 if (modifier != EXPAND_CONST_ADDRESS
6884 && modifier != EXPAND_INITIALIZER
6885 && modifier != EXPAND_MEMORY
6886 && TREE_CODE (array) == STRING_CST
6887 && TREE_CODE (index) == INTEGER_CST
6888 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6889 && GET_MODE_CLASS (mode) == MODE_INT
6890 && GET_MODE_SIZE (mode) == 1)
6891 return gen_int_mode (TREE_STRING_POINTER (array)
6892 [TREE_INT_CST_LOW (index)], mode);
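/* Editorial example (not in the original comments): the C expression
   "foo"[2] is caught right here and expands directly to the QImode
   constant (const_int 111), the character code of 'o'.  */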
6894 /* If this is a constant index into a constant array,
6895 just get the value from the array. Handle both the cases when
6896 we have an explicit constructor and when our operand is a variable
6897 that was declared const. */
6899 if (modifier != EXPAND_CONST_ADDRESS
6900 && modifier != EXPAND_INITIALIZER
6901 && modifier != EXPAND_MEMORY
6902 && TREE_CODE (array) == CONSTRUCTOR
6903 && ! TREE_SIDE_EFFECTS (array)
6904 && TREE_CODE (index) == INTEGER_CST
6905 && 0 > compare_tree_int (index,
6906 list_length (CONSTRUCTOR_ELTS
6907 (TREE_OPERAND (exp, 0)))))
6909 tree elem;
6911 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6912 i = TREE_INT_CST_LOW (index);
6913 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6916 if (elem)
6917 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6918 modifier);
6921 else if (optimize >= 1
6922 && modifier != EXPAND_CONST_ADDRESS
6923 && modifier != EXPAND_INITIALIZER
6924 && modifier != EXPAND_MEMORY
6925 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6926 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6927 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6928 && targetm.binds_local_p (array))
6930 if (TREE_CODE (index) == INTEGER_CST)
6932 tree init = DECL_INITIAL (array);
6934 if (TREE_CODE (init) == CONSTRUCTOR)
6936 tree elem;
6938 for (elem = CONSTRUCTOR_ELTS (init);
6939 (elem
6940 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6941 elem = TREE_CHAIN (elem))
6944 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6945 return expand_expr (fold (TREE_VALUE (elem)), target,
6946 tmode, modifier);
6948 else if (TREE_CODE (init) == STRING_CST
6949 && 0 > compare_tree_int (index,
6950 TREE_STRING_LENGTH (init)))
6952 tree type = TREE_TYPE (TREE_TYPE (init));
6953 enum machine_mode mode = TYPE_MODE (type);
6955 if (GET_MODE_CLASS (mode) == MODE_INT
6956 && GET_MODE_SIZE (mode) == 1)
6957 return gen_int_mode (TREE_STRING_POINTER (init)
6958 [TREE_INT_CST_LOW (index)], mode);
6963 goto normal_inner_ref;
6965 case COMPONENT_REF:
6966 /* If the operand is a CONSTRUCTOR, we can just extract the
6967 appropriate field if it is present. */
6968 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6970 tree elt;
6972 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6973 elt = TREE_CHAIN (elt))
6974 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6975 /* We can normally use the value of the field in the
6976 CONSTRUCTOR. However, if this is a bitfield in
6977 an integral mode that we can fit in a HOST_WIDE_INT,
6978 we must mask only the number of bits in the bitfield,
6979 since this is done implicitly by the constructor. If
6980 the bitfield does not meet either of those conditions,
6981 we can't do this optimization. */
6982 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6983 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6984 == MODE_INT)
6985 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6986 <= HOST_BITS_PER_WIDE_INT))))
6988 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6989 && modifier == EXPAND_STACK_PARM)
6990 target = 0;
6991 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6992 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6994 HOST_WIDE_INT bitsize
6995 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6996 enum machine_mode imode
6997 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6999 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7001 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7002 op0 = expand_and (imode, op0, op1, target);
7004 else
7006 tree count
7007 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7008 0);
7010 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7011 target, 0);
7012 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7013 target, 0);
7017 return op0;
7020 goto normal_inner_ref;
7022 case BIT_FIELD_REF:
7023 case ARRAY_RANGE_REF:
7024 normal_inner_ref:
7026 enum machine_mode mode1;
7027 HOST_WIDE_INT bitsize, bitpos;
7028 tree offset;
7029 int volatilep = 0;
7030 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7031 &mode1, &unsignedp, &volatilep);
7032 rtx orig_op0;
7034 /* If we got back the original object, something is wrong. Perhaps
7035 we are evaluating an expression too early. In any event, don't
7036 infinitely recurse. */
7037 if (tem == exp)
7038 abort ();
7040 /* If TEM's type is a union of variable size, pass TARGET to the inner
7041 computation, since it will need a temporary and TARGET is known
7042 to suffice as one. This occurs in unchecked conversion in Ada. */
7044 orig_op0 = op0
7045 = expand_expr (tem,
7046 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7047 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7048 != INTEGER_CST)
7049 && modifier != EXPAND_STACK_PARM
7050 ? target : NULL_RTX),
7051 VOIDmode,
7052 (modifier == EXPAND_INITIALIZER
7053 || modifier == EXPAND_CONST_ADDRESS
7054 || modifier == EXPAND_STACK_PARM)
7055 ? modifier : EXPAND_NORMAL);
7057 /* If this is a constant, put it into a register if it is a
7058 legitimate constant and OFFSET is 0 and memory if it isn't. */
7059 if (CONSTANT_P (op0))
7061 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7062 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7063 && offset == 0)
7064 op0 = force_reg (mode, op0);
7065 else
7066 op0 = validize_mem (force_const_mem (mode, op0));
7069 /* Otherwise, if this object is not in memory and we either have an
7070 offset or a BLKmode result, put it there. This case can't occur in
7071 C, but can in Ada if we have unchecked conversion of an expression
7072 from a scalar type to an array or record type or for an
7073 ARRAY_RANGE_REF whose type is BLKmode. */
7074 else if (GET_CODE (op0) != MEM
7075 && (offset != 0
7076 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7078 /* If the operand is a SAVE_EXPR, we can deal with this by
7079 forcing the SAVE_EXPR into memory. */
7080 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7082 put_var_into_stack (TREE_OPERAND (exp, 0),
7083 /*rescan=*/true);
7084 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7086 else
7088 tree nt
7089 = build_qualified_type (TREE_TYPE (tem),
7090 (TYPE_QUALS (TREE_TYPE (tem))
7091 | TYPE_QUAL_CONST));
7092 rtx memloc = assign_temp (nt, 1, 1, 1);
7094 emit_move_insn (memloc, op0);
7095 op0 = memloc;
7099 if (offset != 0)
7101 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7102 EXPAND_SUM);
7104 if (GET_CODE (op0) != MEM)
7105 abort ();
7107 #ifdef POINTERS_EXTEND_UNSIGNED
7108 if (GET_MODE (offset_rtx) != Pmode)
7109 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7110 #else
7111 if (GET_MODE (offset_rtx) != ptr_mode)
7112 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7113 #endif
7115 if (GET_MODE (op0) == BLKmode
7116 /* A constant address in OP0 can have VOIDmode; we must
7117 not try to call force_reg in that case. */
7118 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7119 && bitsize != 0
7120 && (bitpos % bitsize) == 0
7121 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7122 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7124 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7125 bitpos = 0;
7128 op0 = offset_address (op0, offset_rtx,
7129 highest_pow2_factor (offset));
7132 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7133 record its alignment as BIGGEST_ALIGNMENT. */
7134 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7135 && is_aligning_offset (offset, tem))
7136 set_mem_align (op0, BIGGEST_ALIGNMENT);
7138 /* Don't forget about volatility even if this is a bitfield. */
7139 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7141 if (op0 == orig_op0)
7142 op0 = copy_rtx (op0);
7144 MEM_VOLATILE_P (op0) = 1;
7147 /* The following code doesn't handle CONCAT.
7148 Assume only bitpos == 0 can be used for CONCAT, due to
7149 one-element arrays having the same mode as their element. */
7150 if (GET_CODE (op0) == CONCAT)
7152 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7153 abort ();
7154 return op0;
7157 /* In cases where an aligned union has an unaligned object
7158 as a field, we might be extracting a BLKmode value from
7159 an integer-mode (e.g., SImode) object. Handle this case
7160 by doing the extract into an object as wide as the field
7161 (which we know to be the width of a basic mode), then
7162 storing into memory, and changing the mode to BLKmode. */
7163 if (mode1 == VOIDmode
7164 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7165 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7166 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7167 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7168 && modifier != EXPAND_CONST_ADDRESS
7169 && modifier != EXPAND_INITIALIZER)
7170 /* If the field isn't aligned enough to fetch as a memref,
7171 fetch it as a bit field. */
7172 || (mode1 != BLKmode
7173 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7174 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7175 || (GET_CODE (op0) == MEM
7176 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7177 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7178 && ((modifier == EXPAND_CONST_ADDRESS
7179 || modifier == EXPAND_INITIALIZER)
7180 ? STRICT_ALIGNMENT
7181 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7182 || (bitpos % BITS_PER_UNIT != 0)))
7183 /* If the type and the field are a constant size and the
7184 size of the type isn't the same size as the bitfield,
7185 we must use bitfield operations. */
7186 || (bitsize >= 0
7187 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7188 == INTEGER_CST)
7189 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7190 bitsize)))
7192 enum machine_mode ext_mode = mode;
7194 if (ext_mode == BLKmode
7195 && ! (target != 0 && GET_CODE (op0) == MEM
7196 && GET_CODE (target) == MEM
7197 && bitpos % BITS_PER_UNIT == 0))
7198 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7200 if (ext_mode == BLKmode)
7202 if (target == 0)
7203 target = assign_temp (type, 0, 1, 1);
7205 if (bitsize == 0)
7206 return target;
7208 /* In this case, BITPOS must start at a byte boundary and
7209 TARGET, if specified, must be a MEM. */
7210 if (GET_CODE (op0) != MEM
7211 || (target != 0 && GET_CODE (target) != MEM)
7212 || bitpos % BITS_PER_UNIT != 0)
7213 abort ();
7215 emit_block_move (target,
7216 adjust_address (op0, VOIDmode,
7217 bitpos / BITS_PER_UNIT),
7218 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7219 / BITS_PER_UNIT),
7220 (modifier == EXPAND_STACK_PARM
7221 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7223 return target;
7226 op0 = validize_mem (op0);
7228 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7229 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7231 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7232 (modifier == EXPAND_STACK_PARM
7233 ? NULL_RTX : target),
7234 ext_mode, ext_mode,
7235 int_size_in_bytes (TREE_TYPE (tem)));
7237 /* If the result is a record type and BITSIZE is narrower than
7238 the mode of OP0, an integral mode, and this is a big endian
7239 machine, we must put the field into the high-order bits. */
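/* For instance, on a 32-bit big-endian target (illustrative), a 24-bit
record extracted into an SImode register is shifted left by 8 bits so
that its first byte lands in the most significant byte of the register,
matching the record's layout in memory.  */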
7240 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7241 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7242 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7243 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7244 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7245 - bitsize),
7246 op0, 1);
7248 /* If the result type is BLKmode, store the data into a temporary
7249 of the appropriate type, but with the mode corresponding to the
7250 mode for the data we have (op0's mode). It's tempting to make
7251 this a constant type, since we know it's only being stored once,
7252 but that can cause problems if we are taking the address of this
7253 COMPONENT_REF because the MEM of any reference via that address
7254 will have flags corresponding to the type, which will not
7255 necessarily be constant. */
7256 if (mode == BLKmode)
7258 rtx new
7259 = assign_stack_temp_for_type
7260 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7262 emit_move_insn (new, op0);
7263 op0 = copy_rtx (new);
7264 PUT_MODE (op0, BLKmode);
7265 set_mem_attributes (op0, exp, 1);
7268 return op0;
7271 /* If the result is BLKmode, use that to access the object
7272 now as well. */
7273 if (mode == BLKmode)
7274 mode1 = BLKmode;
7276 /* Get a reference to just this component. */
7277 if (modifier == EXPAND_CONST_ADDRESS
7278 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7279 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7280 else
7281 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7283 if (op0 == orig_op0)
7284 op0 = copy_rtx (op0);
7286 set_mem_attributes (op0, exp, 0);
7287 if (GET_CODE (XEXP (op0, 0)) == REG)
7288 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7290 MEM_VOLATILE_P (op0) |= volatilep;
7291 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7292 || modifier == EXPAND_CONST_ADDRESS
7293 || modifier == EXPAND_INITIALIZER)
7294 return op0;
7295 else if (target == 0)
7296 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7298 convert_move (target, op0, unsignedp);
7299 return target;
7302 case VTABLE_REF:
7304 rtx insn, before = get_last_insn (), vtbl_ref;
7306 /* Evaluate the interior expression. */
7307 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7308 tmode, modifier);
7310 /* Get or create an instruction off which to hang a note. */
7311 if (REG_P (subtarget))
7313 target = subtarget;
7314 insn = get_last_insn ();
7315 if (insn == before)
7316 abort ();
7317 if (! INSN_P (insn))
7318 insn = prev_nonnote_insn (insn);
7320 else
7322 target = gen_reg_rtx (GET_MODE (subtarget));
7323 insn = emit_move_insn (target, subtarget);
7326 /* Collect the data for the note. */
7327 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7328 vtbl_ref = plus_constant (vtbl_ref,
7329 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7330 /* Discard the initial CONST that was added. */
7331 vtbl_ref = XEXP (vtbl_ref, 0);
7333 REG_NOTES (insn)
7334 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7336 return target;
7339 /* Intended for a reference to a buffer of a file-object in Pascal.
7340 But it's not certain that a special tree code will really be
7341 necessary for these. INDIRECT_REF might work for them. */
7342 case BUFFER_REF:
7343 abort ();
7345 case IN_EXPR:
7347 /* Pascal set IN expression.
7349 Algorithm:
7350 rlo = set_low - (set_low%bits_per_word);
7351 the_word = set [ (index - rlo)/bits_per_word ];
7352 bit_index = index % bits_per_word;
7353 bitmask = 1 << bit_index;
7354 return !!(the_word & bitmask); */
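/* Worked example (illustrative values): with bits_per_word == 32 and
set_low == 3, testing index 37 gives rlo = 0, the_word = set[1],
bit_index = 5 and bitmask = 1 << 5, so the result is bit 5 of the
set's second word.  */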
7356 tree set = TREE_OPERAND (exp, 0);
7357 tree index = TREE_OPERAND (exp, 1);
7358 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7359 tree set_type = TREE_TYPE (set);
7360 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7361 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7362 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7363 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7364 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7365 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7366 rtx setaddr = XEXP (setval, 0);
7367 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7368 rtx rlow;
7369 rtx diff, quo, rem, addr, bit, result;
7371 /* If domain is empty, answer is no. Likewise if index is constant
7372 and out of bounds. */
7373 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7374 && TREE_CODE (set_low_bound) == INTEGER_CST
7375 && tree_int_cst_lt (set_high_bound, set_low_bound))
7376 || (TREE_CODE (index) == INTEGER_CST
7377 && TREE_CODE (set_low_bound) == INTEGER_CST
7378 && tree_int_cst_lt (index, set_low_bound))
7379 || (TREE_CODE (set_high_bound) == INTEGER_CST
7380 && TREE_CODE (index) == INTEGER_CST
7381 && tree_int_cst_lt (set_high_bound, index))))
7382 return const0_rtx;
7384 if (target == 0)
7385 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7387 /* If we get here, we have to generate the code for both cases
7388 (in range and out of range). */
7390 op0 = gen_label_rtx ();
7391 op1 = gen_label_rtx ();
7393 if (! (GET_CODE (index_val) == CONST_INT
7394 && GET_CODE (lo_r) == CONST_INT))
7395 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7396 GET_MODE (index_val), iunsignedp, op1);
7398 if (! (GET_CODE (index_val) == CONST_INT
7399 && GET_CODE (hi_r) == CONST_INT))
7400 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7401 GET_MODE (index_val), iunsignedp, op1);
7403 /* Calculate the element number of bit zero in the first word
7404 of the set. */
7405 if (GET_CODE (lo_r) == CONST_INT)
7406 rlow = GEN_INT (INTVAL (lo_r)
7407 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7408 else
7409 rlow = expand_binop (index_mode, and_optab, lo_r,
7410 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7411 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7413 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7414 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7416 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7417 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7418 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7419 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7421 addr = memory_address (byte_mode,
7422 expand_binop (index_mode, add_optab, diff,
7423 setaddr, NULL_RTX, iunsignedp,
7424 OPTAB_LIB_WIDEN));
7426 /* Extract the bit we want to examine. */
7427 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7428 gen_rtx_MEM (byte_mode, addr),
7429 make_tree (TREE_TYPE (index), rem),
7430 NULL_RTX, 1);
7431 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7432 GET_MODE (target) == byte_mode ? target : 0,
7433 1, OPTAB_LIB_WIDEN);
7435 if (result != target)
7436 convert_move (target, result, 1);
7438 /* Output the code to handle the out-of-range case. */
7439 emit_jump (op0);
7440 emit_label (op1);
7441 emit_move_insn (target, const0_rtx);
7442 emit_label (op0);
7443 return target;
7446 case WITH_CLEANUP_EXPR:
7447 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7449 WITH_CLEANUP_EXPR_RTL (exp)
7450 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7451 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7452 CLEANUP_EH_ONLY (exp));
7454 /* That's it for this cleanup. */
7455 TREE_OPERAND (exp, 1) = 0;
7457 return WITH_CLEANUP_EXPR_RTL (exp);
7459 case CLEANUP_POINT_EXPR:
7461 /* Start a new binding layer that will keep track of all cleanup
7462 actions to be performed. */
7463 expand_start_bindings (2);
7465 target_temp_slot_level = temp_slot_level;
7467 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7468 /* If we're going to use this value, load it up now. */
7469 if (! ignore)
7470 op0 = force_not_mem (op0);
7471 preserve_temp_slots (op0);
7472 expand_end_bindings (NULL_TREE, 0, 0);
7474 return op0;
7476 case CALL_EXPR:
7477 /* Check for a built-in function. */
7478 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7479 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7480 == FUNCTION_DECL)
7481 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7483 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7484 == BUILT_IN_FRONTEND)
7485 return (*lang_hooks.expand_expr) (exp, original_target,
7486 tmode, modifier,
7487 alt_rtl);
7488 else
7489 return expand_builtin (exp, target, subtarget, tmode, ignore);
7492 return expand_call (exp, target, ignore);
7494 case NON_LVALUE_EXPR:
7495 case NOP_EXPR:
7496 case CONVERT_EXPR:
7497 case REFERENCE_EXPR:
7498 if (TREE_OPERAND (exp, 0) == error_mark_node)
7499 return const0_rtx;
7501 if (TREE_CODE (type) == UNION_TYPE)
7503 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7505 /* If both input and output are BLKmode, this conversion isn't doing
7506 anything except possibly changing memory attributes. */
7507 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7509 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7510 modifier);
7512 result = copy_rtx (result);
7513 set_mem_attributes (result, exp, 0);
7514 return result;
7517 if (target == 0)
7519 if (TYPE_MODE (type) != BLKmode)
7520 target = gen_reg_rtx (TYPE_MODE (type));
7521 else
7522 target = assign_temp (type, 0, 1, 1);
7525 if (GET_CODE (target) == MEM)
7526 /* Store data into beginning of memory target. */
7527 store_expr (TREE_OPERAND (exp, 0),
7528 adjust_address (target, TYPE_MODE (valtype), 0),
7529 modifier == EXPAND_STACK_PARM ? 2 : 0);
7531 else if (GET_CODE (target) == REG)
7532 /* Store this field into a union of the proper type. */
7533 store_field (target,
7534 MIN ((int_size_in_bytes (TREE_TYPE
7535 (TREE_OPERAND (exp, 0)))
7536 * BITS_PER_UNIT),
7537 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7538 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7539 VOIDmode, 0, type, 0);
7540 else
7541 abort ();
7543 /* Return the entire union. */
7544 return target;
7547 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7549 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7550 modifier);
7552 /* If the signedness of the conversion differs and OP0 is
7553 a promoted SUBREG, clear that indication since we now
7554 have to do the proper extension. */
7555 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7556 && GET_CODE (op0) == SUBREG)
7557 SUBREG_PROMOTED_VAR_P (op0) = 0;
7559 return op0;
7562 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7563 if (GET_MODE (op0) == mode)
7564 return op0;
7566 /* If OP0 is a constant, just convert it into the proper mode. */
7567 if (CONSTANT_P (op0))
7569 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7570 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7572 if (modifier == EXPAND_INITIALIZER)
7573 return simplify_gen_subreg (mode, op0, inner_mode,
7574 subreg_lowpart_offset (mode,
7575 inner_mode));
7576 else
7577 return convert_modes (mode, inner_mode, op0,
7578 TREE_UNSIGNED (inner_type));
7581 if (modifier == EXPAND_INITIALIZER)
7582 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7584 if (target == 0)
7585 return
7586 convert_to_mode (mode, op0,
7587 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7588 else
7589 convert_move (target, op0,
7590 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7591 return target;
7593 case VIEW_CONVERT_EXPR:
7594 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7596 /* If the input and output modes are both the same, we are done.
7597 Otherwise, if neither mode is BLKmode and both are integral and within
7598 a word, we can use gen_lowpart. If neither is true, make sure the
7599 operand is in memory and convert the MEM to the new mode. */
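/* For instance (illustrative), reinterpreting a word-sized integral
value as another integral type can be done with gen_lowpart, while
reinterpreting a float as an integer, or anything involving BLKmode,
is routed through a memory temporary and re-read in the new mode.  */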
7600 if (TYPE_MODE (type) == GET_MODE (op0))
7602 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7603 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7604 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7605 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7606 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7607 op0 = gen_lowpart (TYPE_MODE (type), op0);
7608 else if (GET_CODE (op0) != MEM)
7610 /* If the operand is not a MEM, force it into memory. Since we
7611 are going to be changing the mode of the MEM, don't call
7612 force_const_mem for constants because we don't allow pool
7613 constants to change mode. */
7614 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7616 if (TREE_ADDRESSABLE (exp))
7617 abort ();
7619 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7620 target
7621 = assign_stack_temp_for_type
7622 (TYPE_MODE (inner_type),
7623 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7625 emit_move_insn (target, op0);
7626 op0 = target;
7629 /* At this point, OP0 is in the correct mode. If the output type is such
7630 that the operand is known to be aligned, indicate that it is.
7631 Otherwise, we need only be concerned about alignment for non-BLKmode
7632 results. */
7633 if (GET_CODE (op0) == MEM)
7635 op0 = copy_rtx (op0);
7637 if (TYPE_ALIGN_OK (type))
7638 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7639 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7640 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7642 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7643 HOST_WIDE_INT temp_size
7644 = MAX (int_size_in_bytes (inner_type),
7645 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7646 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7647 temp_size, 0, type);
7648 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7650 if (TREE_ADDRESSABLE (exp))
7651 abort ();
7653 if (GET_MODE (op0) == BLKmode)
7654 emit_block_move (new_with_op0_mode, op0,
7655 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7656 (modifier == EXPAND_STACK_PARM
7657 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7658 else
7659 emit_move_insn (new_with_op0_mode, op0);
7661 op0 = new;
7664 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7667 return op0;
7669 case PLUS_EXPR:
7670 this_optab = ! unsignedp && flag_trapv
7671 && (GET_MODE_CLASS (mode) == MODE_INT)
7672 ? addv_optab : add_optab;
7674 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7675 something else, make sure we add the register to the constant and
7676 then to the other thing. This case can occur during strength
7677 reduction and doing it this way will produce better code if the
7678 frame pointer or argument pointer is eliminated.
7680 fold-const.c will ensure that the constant is always in the inner
7681 PLUS_EXPR, so the only case we need to do anything about is if
7682 sp, ap, or fp is our second argument, in which case we must swap
7683 the innermost first argument and our second argument. */
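/* Concretely: given (X + C) + FP we swap X and FP so the tree becomes
(FP + C) + X; the register is then combined with the constant first,
which folds nicely once the frame pointer or argument pointer is
eliminated into a stack-pointer offset.  */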
7685 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7686 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7687 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7688 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7689 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7690 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7692 tree t = TREE_OPERAND (exp, 1);
7694 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7695 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7698 /* If the result is to be ptr_mode and we are adding an integer to
7699 something, we might be forming a constant. So try to use
7700 plus_constant. If it produces a sum and we can't accept it,
7701 use force_operand. This allows P = &ARR[const] to generate
7702 efficient code on machines where a SYMBOL_REF is not a valid
7703 address.
7705 If this is an EXPAND_SUM call, always return the sum. */
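/* For example (names illustrative), expanding &arr[10] for a file-scope
char array: one operand expands to a SYMBOL_REF and the other is the
constant 10, so plus_constant can fold the address into
(const (plus (symbol_ref "arr") (const_int 10))), which an
EXPAND_INITIALIZER caller can emit directly.  */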
7706 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7707 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7709 if (modifier == EXPAND_STACK_PARM)
7710 target = 0;
7711 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7712 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7713 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7715 rtx constant_part;
7717 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7718 EXPAND_SUM);
7719 /* Use immed_double_const to ensure that the constant is
7720 truncated according to the mode of OP1, then sign extended
7721 to a HOST_WIDE_INT. Using the constant directly can result
7722 in non-canonical RTL in a 64x32 cross compile. */
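/* E.g. when a 64-bit host targets a 32-bit machine (illustrative), an
SImode constant with the sign bit set must be sign-extended into the
host-wide integer; using the raw low word would produce a CONST_INT
such as 0xfffffff0 instead of the canonical (const_int -16).  */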
7723 constant_part
7724 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7725 (HOST_WIDE_INT) 0,
7726 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7727 op1 = plus_constant (op1, INTVAL (constant_part));
7728 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7729 op1 = force_operand (op1, target);
7730 return op1;
7733 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7734 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7735 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7737 rtx constant_part;
7739 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7740 (modifier == EXPAND_INITIALIZER
7741 ? EXPAND_INITIALIZER : EXPAND_SUM));
7742 if (! CONSTANT_P (op0))
7744 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7745 VOIDmode, modifier);
7746 /* Return a PLUS if modifier says it's OK. */
7747 if (modifier == EXPAND_SUM
7748 || modifier == EXPAND_INITIALIZER)
7749 return simplify_gen_binary (PLUS, mode, op0, op1);
7750 goto binop2;
7752 /* Use immed_double_const to ensure that the constant is
7753 truncated according to the mode of OP0, then sign extended
7754 to a HOST_WIDE_INT. Using the constant directly can result
7755 in non-canonical RTL in a 64x32 cross compile. */
7756 constant_part
7757 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7758 (HOST_WIDE_INT) 0,
7759 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7760 op0 = plus_constant (op0, INTVAL (constant_part));
7761 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7762 op0 = force_operand (op0, target);
7763 return op0;
7767 /* No sense saving up arithmetic to be done
7768 if it's all in the wrong mode to form part of an address.
7769 And force_operand won't know whether to sign-extend or
7770 zero-extend. */
7771 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7772 || mode != ptr_mode)
7774 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7775 subtarget, &op0, &op1, 0);
7776 if (op0 == const0_rtx)
7777 return op1;
7778 if (op1 == const0_rtx)
7779 return op0;
7780 goto binop2;
7783 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7784 subtarget, &op0, &op1, modifier);
7785 return simplify_gen_binary (PLUS, mode, op0, op1);
7787 case MINUS_EXPR:
7788 /* For initializers, we are allowed to return a MINUS of two
7789 symbolic constants. Here we handle all cases when both operands
7790 are constant. */
7791 /* Handle difference of two symbolic constants,
7792 for the sake of an initializer. */
7793 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7794 && really_constant_p (TREE_OPERAND (exp, 0))
7795 && really_constant_p (TREE_OPERAND (exp, 1)))
7797 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7798 NULL_RTX, &op0, &op1, modifier);
7800 /* If the last operand is a CONST_INT, use plus_constant of
7801 the negated constant. Else make the MINUS. */
7802 if (GET_CODE (op1) == CONST_INT)
7803 return plus_constant (op0, - INTVAL (op1));
7804 else
7805 return gen_rtx_MINUS (mode, op0, op1);
7808 this_optab = ! unsignedp && flag_trapv
7809 && (GET_MODE_CLASS(mode) == MODE_INT)
7810 ? subv_optab : sub_optab;
7812 /* No sense saving up arithmetic to be done
7813 if it's all in the wrong mode to form part of an address.
7814 And force_operand won't know whether to sign-extend or
7815 zero-extend. */
7816 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7817 || mode != ptr_mode)
7818 goto binop;
7820 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7821 subtarget, &op0, &op1, modifier);
7823 /* Convert A - const to A + (-const). */
7824 if (GET_CODE (op1) == CONST_INT)
7826 op1 = negate_rtx (mode, op1);
7827 return simplify_gen_binary (PLUS, mode, op0, op1);
7830 goto binop2;
7832 case MULT_EXPR:
7833 /* If first operand is constant, swap them.
7834 Thus the following special case checks need only
7835 check the second operand. */
7836 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7838 tree t1 = TREE_OPERAND (exp, 0);
7839 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7840 TREE_OPERAND (exp, 1) = t1;
7843 /* Attempt to return something suitable for generating an
7844 indexed address, for machines that support that. */
7846 if (modifier == EXPAND_SUM && mode == ptr_mode
7847 && host_integerp (TREE_OPERAND (exp, 1), 0))
7849 tree exp1 = TREE_OPERAND (exp, 1);
7851 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7852 EXPAND_SUM);
7854 if (GET_CODE (op0) != REG)
7855 op0 = force_operand (op0, NULL_RTX);
7856 if (GET_CODE (op0) != REG)
7857 op0 = copy_to_mode_reg (mode, op0);
7859 return gen_rtx_MULT (mode, op0,
7860 gen_int_mode (tree_low_cst (exp1, 0),
7861 TYPE_MODE (TREE_TYPE (exp1))));
7864 if (modifier == EXPAND_STACK_PARM)
7865 target = 0;
7867 /* Check for multiplying things that have been extended
7868 from a narrower type. If this machine supports multiplying
7869 in that narrower type with a result in the desired type,
7870 do it that way, and avoid the explicit type-conversion. */
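/* For example, on a target that provides a widening multiply pattern
such as mulsidi3 (assumed for illustration), (DImode)(int)a *
(DImode)(int)b can be emitted as one widening multiply instead of two
extensions followed by a full DImode multiply.  */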
7871 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7872 && TREE_CODE (type) == INTEGER_TYPE
7873 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7874 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7875 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7876 && int_fits_type_p (TREE_OPERAND (exp, 1),
7877 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7878 /* Don't use a widening multiply if a shift will do. */
7879 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7880 > HOST_BITS_PER_WIDE_INT)
7881 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7882 ||
7883 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7884 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7885 ==
7886 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7887 /* If both operands are extended, they must either both
7888 be zero-extended or both be sign-extended. */
7889 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7890 ==
7891 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7893 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7894 enum machine_mode innermode = TYPE_MODE (op0type);
7895 bool zextend_p = TREE_UNSIGNED (op0type);
7896 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7897 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7899 if (mode == GET_MODE_WIDER_MODE (innermode))
7901 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7903 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7904 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7905 TREE_OPERAND (exp, 1),
7906 NULL_RTX, &op0, &op1, 0);
7907 else
7908 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7909 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7910 NULL_RTX, &op0, &op1, 0);
7911 goto binop2;
7913 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7914 && innermode == word_mode)
7916 rtx htem, hipart;
7917 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7918 NULL_RTX, VOIDmode, 0);
7919 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7920 op1 = convert_modes (innermode, mode,
7921 expand_expr (TREE_OPERAND (exp, 1),
7922 NULL_RTX, VOIDmode, 0),
7923 unsignedp);
7924 else
7925 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7926 NULL_RTX, VOIDmode, 0);
7927 temp = expand_binop (mode, other_optab, op0, op1, target,
7928 unsignedp, OPTAB_LIB_WIDEN);
7929 hipart = gen_highpart (innermode, temp);
7930 htem = expand_mult_highpart_adjust (innermode, hipart,
7931 op0, op1, hipart,
7932 zextend_p);
7933 if (htem != hipart)
7934 emit_move_insn (hipart, htem);
7935 return temp;
7939 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7940 subtarget, &op0, &op1, 0);
7941 return expand_mult (mode, op0, op1, target, unsignedp);
7943 case TRUNC_DIV_EXPR:
7944 case FLOOR_DIV_EXPR:
7945 case CEIL_DIV_EXPR:
7946 case ROUND_DIV_EXPR:
7947 case EXACT_DIV_EXPR:
7948 if (modifier == EXPAND_STACK_PARM)
7949 target = 0;
7950 /* Possible optimization: compute the dividend with EXPAND_SUM
7951 then, if the divisor is constant, optimize the case where
7952 some terms of the dividend have coefficients divisible by it. */
7953 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7954 subtarget, &op0, &op1, 0);
7955 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7957 case RDIV_EXPR:
7958 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7959 saving an expensive divide. If not, combine will rebuild the
7960 original computation. */
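/* E.g. x/y + z/y becomes x*(1/y) + z*(1/y); if CSE later shares the
single reciprocal, only one division remains.  This is only attempted
under -funsafe-math-optimizations because it can change rounding.  */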
7961 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7962 && TREE_CODE (type) == REAL_TYPE
7963 && !real_onep (TREE_OPERAND (exp, 0)))
7964 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7965 build (RDIV_EXPR, type,
7966 build_real (type, dconst1),
7967 TREE_OPERAND (exp, 1))),
7968 target, tmode, modifier);
7969 this_optab = sdiv_optab;
7970 goto binop;
7972 case TRUNC_MOD_EXPR:
7973 case FLOOR_MOD_EXPR:
7974 case CEIL_MOD_EXPR:
7975 case ROUND_MOD_EXPR:
7976 if (modifier == EXPAND_STACK_PARM)
7977 target = 0;
7978 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7979 subtarget, &op0, &op1, 0);
7980 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7982 case FIX_ROUND_EXPR:
7983 case FIX_FLOOR_EXPR:
7984 case FIX_CEIL_EXPR:
7985 abort (); /* Not used for C. */
7987 case FIX_TRUNC_EXPR:
7988 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7989 if (target == 0 || modifier == EXPAND_STACK_PARM)
7990 target = gen_reg_rtx (mode);
7991 expand_fix (target, op0, unsignedp);
7992 return target;
7994 case FLOAT_EXPR:
7995 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7996 if (target == 0 || modifier == EXPAND_STACK_PARM)
7997 target = gen_reg_rtx (mode);
7998 /* expand_float can't figure out what to do if FROM has VOIDmode.
7999 So give it the correct mode. With -O, cse will optimize this. */
8000 if (GET_MODE (op0) == VOIDmode)
8001 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8002 op0);
8003 expand_float (target, op0,
8004 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8005 return target;
8007 case NEGATE_EXPR:
8008 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8009 if (modifier == EXPAND_STACK_PARM)
8010 target = 0;
8011 temp = expand_unop (mode,
8012 ! unsignedp && flag_trapv
8013 && (GET_MODE_CLASS(mode) == MODE_INT)
8014 ? negv_optab : neg_optab, op0, target, 0);
8015 if (temp == 0)
8016 abort ();
8017 return temp;
8019 case ABS_EXPR:
8020 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8021 if (modifier == EXPAND_STACK_PARM)
8022 target = 0;
8024 /* ABS_EXPR is not valid for complex arguments. */
8025 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8026 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8027 abort ();
8029 /* Unsigned abs is simply the operand. Testing here means we don't
8030 risk generating incorrect code below. */
8031 if (TREE_UNSIGNED (type))
8032 return op0;
8034 return expand_abs (mode, op0, target, unsignedp,
8035 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8037 case MAX_EXPR:
8038 case MIN_EXPR:
8039 target = original_target;
8040 if (target == 0
8041 || modifier == EXPAND_STACK_PARM
8042 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8043 || GET_MODE (target) != mode
8044 || (GET_CODE (target) == REG
8045 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8046 target = gen_reg_rtx (mode);
8047 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8048 target, &op0, &op1, 0);
8050 /* First try to do it with a special MIN or MAX instruction.
8051 If that does not win, use a conditional jump to select the proper
8052 value. */
8053 this_optab = (unsignedp
8054 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8055 : (code == MIN_EXPR ? smin_optab : smax_optab));
8057 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8058 OPTAB_WIDEN);
8059 if (temp != 0)
8060 return temp;
8062 /* At this point, a MEM target is no longer useful; we will get better
8063 code without it. */
8065 if (GET_CODE (target) == MEM)
8066 target = gen_reg_rtx (mode);
8068 /* If op1 was placed in target, swap op0 and op1. */
8069 if (target != op0 && target == op1)
8071 rtx tem = op0;
8072 op0 = op1;
8073 op1 = tem;
8076 if (target != op0)
8077 emit_move_insn (target, op0);
8079 op0 = gen_label_rtx ();
8081 /* If this mode is an integer too wide to compare properly,
8082 compare word by word. Rely on cse to optimize constant cases. */
8083 if (GET_MODE_CLASS (mode) == MODE_INT
8084 && ! can_compare_p (GE, mode, ccp_jump))
8086 if (code == MAX_EXPR)
8087 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8088 NULL_RTX, op0);
8089 else
8090 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8091 NULL_RTX, op0);
8093 else
8095 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8096 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
8098 emit_move_insn (target, op1);
8099 emit_label (op0);
8100 return target;
8102 case BIT_NOT_EXPR:
8103 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8104 if (modifier == EXPAND_STACK_PARM)
8105 target = 0;
8106 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8107 if (temp == 0)
8108 abort ();
8109 return temp;
8111 /* ??? Can optimize bitwise operations with one arg constant.
8112 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8113 and (a bitwise1 b) bitwise2 b (etc)
8114 but that is probably not worth while. */
8116 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8117 boolean values when we want in all cases to compute both of them. In
8118 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8119 as actual zero-or-1 values and then bitwise anding. In cases where
8120 there cannot be any side effects, better code would be made by
8121 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8122 how to recognize those cases. */
8124 case TRUTH_AND_EXPR:
8125 case BIT_AND_EXPR:
8126 this_optab = and_optab;
8127 goto binop;
8129 case TRUTH_OR_EXPR:
8130 case BIT_IOR_EXPR:
8131 this_optab = ior_optab;
8132 goto binop;
8134 case TRUTH_XOR_EXPR:
8135 case BIT_XOR_EXPR:
8136 this_optab = xor_optab;
8137 goto binop;
8139 case LSHIFT_EXPR:
8140 case RSHIFT_EXPR:
8141 case LROTATE_EXPR:
8142 case RROTATE_EXPR:
8143 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8144 subtarget = 0;
8145 if (modifier == EXPAND_STACK_PARM)
8146 target = 0;
8147 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8148 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8149 unsignedp);
8151 /* Could determine the answer when only additive constants differ. Also,
8152 the addition of one can be handled by changing the condition. */
8153 case LT_EXPR:
8154 case LE_EXPR:
8155 case GT_EXPR:
8156 case GE_EXPR:
8157 case EQ_EXPR:
8158 case NE_EXPR:
8159 case UNORDERED_EXPR:
8160 case ORDERED_EXPR:
8161 case UNLT_EXPR:
8162 case UNLE_EXPR:
8163 case UNGT_EXPR:
8164 case UNGE_EXPR:
8165 case UNEQ_EXPR:
8166 temp = do_store_flag (exp,
8167 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8168 tmode != VOIDmode ? tmode : mode, 0);
8169 if (temp != 0)
8170 return temp;
8172 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8173 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8174 && original_target
8175 && GET_CODE (original_target) == REG
8176 && (GET_MODE (original_target)
8177 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8179 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8180 VOIDmode, 0);
8182 /* If temp is constant, we can just compute the result. */
8183 if (GET_CODE (temp) == CONST_INT)
8185 if (INTVAL (temp) != 0)
8186 emit_move_insn (target, const1_rtx);
8187 else
8188 emit_move_insn (target, const0_rtx);
8190 return target;
8193 if (temp != original_target)
8195 enum machine_mode mode1 = GET_MODE (temp);
8196 if (mode1 == VOIDmode)
8197 mode1 = tmode != VOIDmode ? tmode : mode;
8199 temp = copy_to_mode_reg (mode1, temp);
8202 op1 = gen_label_rtx ();
8203 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8204 GET_MODE (temp), unsignedp, op1);
8205 emit_move_insn (temp, const1_rtx);
8206 emit_label (op1);
8207 return temp;
8210 /* If no set-flag instruction, must generate a conditional
8211 store into a temporary variable. Drop through
8212 and handle this like && and ||. */
8214 case TRUTH_ANDIF_EXPR:
8215 case TRUTH_ORIF_EXPR:
8216 if (! ignore
8217 && (target == 0
8218 || modifier == EXPAND_STACK_PARM
8219 || ! safe_from_p (target, exp, 1)
8220 /* Make sure we don't have a hard reg (such as function's return
8221 value) live across basic blocks, if not optimizing. */
8222 || (!optimize && GET_CODE (target) == REG
8223 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8224 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8226 if (target)
8227 emit_clr_insn (target);
8229 op1 = gen_label_rtx ();
8230 jumpifnot (exp, op1);
8232 if (target)
8233 emit_0_to_1_insn (target);
8235 emit_label (op1);
8236 return ignore ? const0_rtx : target;
8238 case TRUTH_NOT_EXPR:
8239 if (modifier == EXPAND_STACK_PARM)
8240 target = 0;
8241 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8242 /* The parser is careful to generate TRUTH_NOT_EXPR
8243 only with operands that are always zero or one. */
8244 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8245 target, 1, OPTAB_LIB_WIDEN);
8246 if (temp == 0)
8247 abort ();
8248 return temp;
8250 case COMPOUND_EXPR:
8251 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8252 emit_queue ();
8253 return expand_expr_real (TREE_OPERAND (exp, 1),
8254 (ignore ? const0_rtx : target),
8255 VOIDmode, modifier, alt_rtl);
8257 case COND_EXPR:
8258 /* If we would have a "singleton" (see below) were it not for a
8259 conversion in each arm, bring that conversion back out. */
8260 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8261 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8262 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8263 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8265 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8266 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8268 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8269 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8270 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8271 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8272 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8273 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8274 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8275 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8276 return expand_expr (build1 (NOP_EXPR, type,
8277 build (COND_EXPR, TREE_TYPE (iftrue),
8278 TREE_OPERAND (exp, 0),
8279 iftrue, iffalse)),
8280 target, tmode, modifier);
8284 /* Note that COND_EXPRs whose type is a structure or union
8285 are required to be constructed to contain assignments of
8286 a temporary variable, so that we can evaluate them here
8287 for side effect only. If type is void, we must do likewise. */
8289 /* If an arm of the branch requires a cleanup,
8290 only that cleanup is performed. */
8292 tree singleton = 0;
8293 tree binary_op = 0, unary_op = 0;
8295 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8296 convert it to our mode, if necessary. */
8297 if (integer_onep (TREE_OPERAND (exp, 1))
8298 && integer_zerop (TREE_OPERAND (exp, 2))
8299 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8301 if (ignore)
8303 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8304 modifier);
8305 return const0_rtx;
8308 if (modifier == EXPAND_STACK_PARM)
8309 target = 0;
8310 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8311 if (GET_MODE (op0) == mode)
8312 return op0;
8314 if (target == 0)
8315 target = gen_reg_rtx (mode);
8316 convert_move (target, op0, unsignedp);
8317 return target;
8320 /* Check for X ? A + B : A. If we have this, we can copy A to the
8321 output and conditionally add B. Similarly for unary operations.
8322 Don't do this if X has side-effects because those side effects
8323 might affect A or B and the "?" operation is a sequence point in
8324 ANSI. (operand_equal_p tests for side effects.) */
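/* E.g. for "p ? x + 1 : x" (illustrative), SINGLETON is x and BINARY_OP
is x + 1: x is stored into the result unconditionally, and the
increment is applied only on the branch where p is true.  */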
8326 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8327 && operand_equal_p (TREE_OPERAND (exp, 2),
8328 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8329 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8330 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8331 && operand_equal_p (TREE_OPERAND (exp, 1),
8332 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8333 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8334 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8335 && operand_equal_p (TREE_OPERAND (exp, 2),
8336 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8337 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8338 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8339 && operand_equal_p (TREE_OPERAND (exp, 1),
8340 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8341 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8343 /* If we are not to produce a result, we have no target. Otherwise,
8344 if a target was specified use it; it will not be used as an
8345 intermediate target unless it is safe. If no target, use a
8346 temporary. */
8348 if (ignore)
8349 temp = 0;
8350 else if (modifier == EXPAND_STACK_PARM)
8351 temp = assign_temp (type, 0, 0, 1);
8352 else if (original_target
8353 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8354 || (singleton && GET_CODE (original_target) == REG
8355 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8356 && original_target == var_rtx (singleton)))
8357 && GET_MODE (original_target) == mode
8358 #ifdef HAVE_conditional_move
8359 && (! can_conditionally_move_p (mode)
8360 || GET_CODE (original_target) == REG
8361 || TREE_ADDRESSABLE (type))
8362 #endif
8363 && (GET_CODE (original_target) != MEM
8364 || TREE_ADDRESSABLE (type)))
8365 temp = original_target;
8366 else if (TREE_ADDRESSABLE (type))
8367 abort ();
8368 else
8369 temp = assign_temp (type, 0, 0, 1);
8371 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8372 do the test of X as a store-flag operation, do this as
8373 A + ((X != 0) << log C). Similarly for other simple binary
8374 operators. Only do for C == 1 if BRANCH_COST is low. */
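/* For instance, "p ? n + 4 : n" (illustrative) can be emitted
branch-free as n + ((p != 0) << 2), using one store-flag and one shift
in place of a conditional jump, since 4 is a power of 2.  */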
8375 if (temp && singleton && binary_op
8376 && (TREE_CODE (binary_op) == PLUS_EXPR
8377 || TREE_CODE (binary_op) == MINUS_EXPR
8378 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8379 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8380 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8381 : integer_onep (TREE_OPERAND (binary_op, 1)))
8382 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8384 rtx result;
8385 tree cond;
8386 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8387 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8388 ? addv_optab : add_optab)
8389 : TREE_CODE (binary_op) == MINUS_EXPR
8390 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8391 ? subv_optab : sub_optab)
8392 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8393 : xor_optab);
8395 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8396 if (singleton == TREE_OPERAND (exp, 1))
8397 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8398 else
8399 cond = TREE_OPERAND (exp, 0);
8401 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8402 ? temp : NULL_RTX),
8403 mode, BRANCH_COST <= 1);
8405 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8406 result = expand_shift (LSHIFT_EXPR, mode, result,
8407 build_int_2 (tree_log2
8408 (TREE_OPERAND
8409 (binary_op, 1)),
8410 0),
8411 (safe_from_p (temp, singleton, 1)
8412 ? temp : NULL_RTX), 0);
8414 if (result)
8416 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8417 return expand_binop (mode, boptab, op1, result, temp,
8418 unsignedp, OPTAB_LIB_WIDEN);
8422 do_pending_stack_adjust ();
8423 NO_DEFER_POP;
8424 op0 = gen_label_rtx ();
8426 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8428 if (temp != 0)
8430 /* If the target conflicts with the other operand of the
8431 binary op, we can't use it. Also, we can't use the target
8432 if it is a hard register, because evaluating the condition
8433 might clobber it. */
8434 if ((binary_op
8435 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8436 || (GET_CODE (temp) == REG
8437 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8438 temp = gen_reg_rtx (mode);
8439 store_expr (singleton, temp,
8440 modifier == EXPAND_STACK_PARM ? 2 : 0);
8442 else
8443 expand_expr (singleton,
8444 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8445 if (singleton == TREE_OPERAND (exp, 1))
8446 jumpif (TREE_OPERAND (exp, 0), op0);
8447 else
8448 jumpifnot (TREE_OPERAND (exp, 0), op0);
8450 start_cleanup_deferral ();
8451 if (binary_op && temp == 0)
8452 /* Just touch the other operand. */
8453 expand_expr (TREE_OPERAND (binary_op, 1),
8454 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8455 else if (binary_op)
8456 store_expr (build (TREE_CODE (binary_op), type,
8457 make_tree (type, temp),
8458 TREE_OPERAND (binary_op, 1)),
8459 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8460 else
8461 store_expr (build1 (TREE_CODE (unary_op), type,
8462 make_tree (type, temp)),
8463 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8464 op1 = op0;
8466 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8467 comparison operator. If we have one of these cases, set the
8468 output to A, branch on A (cse will merge these two references),
8469 then set the output to FOO. */
8470 else if (temp
8471 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8472 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8473 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8474 TREE_OPERAND (exp, 1), 0)
8475 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8476 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8477 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8479 if (GET_CODE (temp) == REG
8480 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8481 temp = gen_reg_rtx (mode);
8482 store_expr (TREE_OPERAND (exp, 1), temp,
8483 modifier == EXPAND_STACK_PARM ? 2 : 0);
8484 jumpif (TREE_OPERAND (exp, 0), op0);
8486 start_cleanup_deferral ();
8487 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8488 store_expr (TREE_OPERAND (exp, 2), temp,
8489 modifier == EXPAND_STACK_PARM ? 2 : 0);
8490 else
8491 expand_expr (TREE_OPERAND (exp, 2),
8492 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8493 op1 = op0;
8495 else if (temp
8496 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8497 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8498 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8499 TREE_OPERAND (exp, 2), 0)
8500 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8501 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8502 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8504 if (GET_CODE (temp) == REG
8505 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8506 temp = gen_reg_rtx (mode);
8507 store_expr (TREE_OPERAND (exp, 2), temp,
8508 modifier == EXPAND_STACK_PARM ? 2 : 0);
8509 jumpifnot (TREE_OPERAND (exp, 0), op0);
8511 start_cleanup_deferral ();
8512 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8513 store_expr (TREE_OPERAND (exp, 1), temp,
8514 modifier == EXPAND_STACK_PARM ? 2 : 0);
8515 else
8516 expand_expr (TREE_OPERAND (exp, 1),
8517 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8518 op1 = op0;
8520 else
8522 op1 = gen_label_rtx ();
8523 jumpifnot (TREE_OPERAND (exp, 0), op0);
8525 start_cleanup_deferral ();
8527 /* One branch of the cond can be void, if it never returns. For
8528 example, A ? throw : E. */
8529 if (temp != 0
8530 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8531 store_expr (TREE_OPERAND (exp, 1), temp,
8532 modifier == EXPAND_STACK_PARM ? 2 : 0);
8533 else
8534 expand_expr (TREE_OPERAND (exp, 1),
8535 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8536 end_cleanup_deferral ();
8537 emit_queue ();
8538 emit_jump_insn (gen_jump (op1));
8539 emit_barrier ();
8540 emit_label (op0);
8541 start_cleanup_deferral ();
8542 if (temp != 0
8543 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8544 store_expr (TREE_OPERAND (exp, 2), temp,
8545 modifier == EXPAND_STACK_PARM ? 2 : 0);
8546 else
8547 expand_expr (TREE_OPERAND (exp, 2),
8548 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8551 end_cleanup_deferral ();
8553 emit_queue ();
8554 emit_label (op1);
8555 OK_DEFER_POP;
8557 return temp;
8560 case TARGET_EXPR:
8562 /* Something needs to be initialized, but we didn't know
8563 where that thing was when building the tree. For example,
8564 it could be the return value of a function, or a parameter
8565 to a function which is laid down on the stack, or a temporary
8566 variable which must be passed by reference.
8568 We guarantee that the expression will either be constructed
8569 or copied into our original target. */
8571 tree slot = TREE_OPERAND (exp, 0);
8572 tree cleanups = NULL_TREE;
8573 tree exp1;
8575 if (TREE_CODE (slot) != VAR_DECL)
8576 abort ();
8578 if (! ignore)
8579 target = original_target;
8581 /* Set this here so that if we get a target that refers to a
8582 register variable that's already been used, put_reg_into_stack
8583 knows that it should fix up those uses. */
8584 TREE_USED (slot) = 1;
8586 if (target == 0)
8588 if (DECL_RTL_SET_P (slot))
8590 target = DECL_RTL (slot);
8591 /* If we have already expanded the slot, don't do
8592 it again. (mrs) */
8593 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8594 return target;
8596 else
8598 target = assign_temp (type, 2, 0, 1);
8599 /* All temp slots at this level must not conflict. */
8600 preserve_temp_slots (target);
8601 SET_DECL_RTL (slot, target);
8602 if (TREE_ADDRESSABLE (slot))
8603 put_var_into_stack (slot, /*rescan=*/false);
8605 /* Since SLOT is not known to the called function
8606 to belong to its stack frame, we must build an explicit
8607 cleanup. This case occurs when we must build up a reference
8608 to pass the reference as an argument. In this case,
8609 it is very likely that such a reference need not be
8610 built here. */
8612 if (TREE_OPERAND (exp, 2) == 0)
8613 TREE_OPERAND (exp, 2)
8614 = (*lang_hooks.maybe_build_cleanup) (slot);
8615 cleanups = TREE_OPERAND (exp, 2);
8618 else
8620 /* This case does occur, when expanding a parameter which
8621 needs to be constructed on the stack. The target
8622 is the actual stack address that we want to initialize.
8623 The function we call will perform the cleanup in this case. */
8625 /* If we have already assigned it space, use that space,
8626 not the target that we were passed in, as our target
8627 parameter is only a hint. */
8628 if (DECL_RTL_SET_P (slot))
8630 target = DECL_RTL (slot);
8631 /* If we have already expanded the slot, don't do
8632 it again. (mrs) */
8633 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8634 return target;
8636 else
8638 SET_DECL_RTL (slot, target);
8639 /* If we must have an addressable slot, then make sure that
8640 the RTL that we just stored in slot is OK. */
8641 if (TREE_ADDRESSABLE (slot))
8642 put_var_into_stack (slot, /*rescan=*/true);
8646 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8647 /* Mark it as expanded. */
8648 TREE_OPERAND (exp, 1) = NULL_TREE;
8650 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8652 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8654 return target;
8657 case INIT_EXPR:
8659 tree lhs = TREE_OPERAND (exp, 0);
8660 tree rhs = TREE_OPERAND (exp, 1);
8662 temp = expand_assignment (lhs, rhs, ! ignore);
8663 return temp;
8666 case MODIFY_EXPR:
8668 /* If lhs is complex, expand calls in rhs before computing it.
8669 That's so we don't compute a pointer and save it over a
8670 call. If lhs is simple, compute it first so we can give it
8671 as a target if the rhs is just a call. This avoids an
8672 extra temp and copy and that prevents a partial-subsumption
8673 which makes bad code. Actually we could treat
8674 component_ref's of vars like vars. */
8676 tree lhs = TREE_OPERAND (exp, 0);
8677 tree rhs = TREE_OPERAND (exp, 1);
8679 temp = 0;
8681 /* Check for |= or &= of a bitfield of size one into another bitfield
8682 of size 1. In this case, (unless we need the result of the
8683 assignment) we can do this more efficiently with a
8684 test followed by an assignment, if necessary.
8686 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8687 things change so we do, this code should be enhanced to
8688 support it. */
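/* Concretely, for one-bit fields "a.x |= b.y" whose result is unused
(names illustrative), this emits a jump around the store when b.y is
zero and otherwise stores 1 into a.x, avoiding a read-modify-write of
a.x.  */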
8689 if (ignore
8690 && TREE_CODE (lhs) == COMPONENT_REF
8691 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8692 || TREE_CODE (rhs) == BIT_AND_EXPR)
8693 && TREE_OPERAND (rhs, 0) == lhs
8694 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8695 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8696 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8698 rtx label = gen_label_rtx ();
8700 do_jump (TREE_OPERAND (rhs, 1),
8701 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8702 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8703 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8704 (TREE_CODE (rhs) == BIT_IOR_EXPR
8705 ? integer_one_node
8706 : integer_zero_node)),
8707 0);
8708 do_pending_stack_adjust ();
8709 emit_label (label);
8710 return const0_rtx;
8713 temp = expand_assignment (lhs, rhs, ! ignore);
8715 return temp;
8718 case RETURN_EXPR:
8719 if (!TREE_OPERAND (exp, 0))
8720 expand_null_return ();
8721 else
8722 expand_return (TREE_OPERAND (exp, 0));
8723 return const0_rtx;
8725 case PREINCREMENT_EXPR:
8726 case PREDECREMENT_EXPR:
8727 return expand_increment (exp, 0, ignore);
8729 case POSTINCREMENT_EXPR:
8730 case POSTDECREMENT_EXPR:
8731 /* Faster to treat as pre-increment if result is not used. */
8732 return expand_increment (exp, ! ignore, ignore);
8734 case ADDR_EXPR:
8735 if (modifier == EXPAND_STACK_PARM)
8736 target = 0;
8737 /* Are we taking the address of a nested function? */
8738 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8739 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8740 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8741 && ! TREE_STATIC (exp))
8743 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8744 op0 = force_operand (op0, target);
8746 /* If we are taking the address of something erroneous, just
8747 return a zero. */
8748 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8749 return const0_rtx;
8750 /* If we are taking the address of a constant and are at the
8751 top level, we have to use output_constant_def since we can't
8752 call force_const_mem at top level. */
8753 else if (cfun == 0
8754 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8755 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8756 == 'c')))
8757 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8758 else
8760 /* We make sure to pass const0_rtx down if we came in with
8761 ignore set, to avoid doing the cleanups twice for something. */
8762 op0 = expand_expr (TREE_OPERAND (exp, 0),
8763 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8764 (modifier == EXPAND_INITIALIZER
8765 ? modifier : EXPAND_CONST_ADDRESS));
8767 /* If we are going to ignore the result, OP0 will have been set
8768 to const0_rtx, so just return it. Don't get confused and
8769 think we are taking the address of the constant. */
8770 if (ignore)
8771 return op0;
8773 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8774 clever and return a REG when given a MEM. */
8775 op0 = protect_from_queue (op0, 1);
8777 /* We would like the object in memory. If it is a constant, we can
8778 have it be statically allocated into memory. For a non-constant,
8779 we need to allocate some memory and store the value into it. */
8781 if (CONSTANT_P (op0))
8782 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8783 op0);
8784 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8785 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8786 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8788 /* If the operand is a SAVE_EXPR, we can deal with this by
8789 forcing the SAVE_EXPR into memory. */
8790 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8792 put_var_into_stack (TREE_OPERAND (exp, 0),
8793 /*rescan=*/true);
8794 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8796 else
8798 /* If this object is in a register, it can't be BLKmode. */
8799 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8800 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8802 if (GET_CODE (op0) == PARALLEL)
8803 /* Handle calls that pass values in multiple
8804 non-contiguous locations. The Irix 6 ABI has examples
8805 of this. */
8806 emit_group_store (memloc, op0, inner_type,
8807 int_size_in_bytes (inner_type));
8808 else
8809 emit_move_insn (memloc, op0);
8811 op0 = memloc;
8815 if (GET_CODE (op0) != MEM)
8816 abort ();
8818 mark_temp_addr_taken (op0);
8819 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8821 op0 = XEXP (op0, 0);
8822 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8823 op0 = convert_memory_address (ptr_mode, op0);
8824 return op0;
8827 /* If OP0 is not aligned at least as much as the type requires, we
8828 need to make a temporary, copy OP0 to it, and take the address of
8829 the temporary. We want to use the alignment of the type, not of
8830 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8831 the test for BLKmode means that can't happen. The test for
8832 BLKmode is because we never make mis-aligned MEMs with
8833 non-BLKmode.
8835 We don't need to do this at all if the machine doesn't have
8836 strict alignment. */
8837 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8838 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8839 > MEM_ALIGN (op0))
8840 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8842 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8843 rtx new;
8845 if (TYPE_ALIGN_OK (inner_type))
8846 abort ();
8848 if (TREE_ADDRESSABLE (inner_type))
8850 /* We can't make a bitwise copy of this object, so fail. */
8851 error ("cannot take the address of an unaligned member");
8852 return const0_rtx;
8855 new = assign_stack_temp_for_type
8856 (TYPE_MODE (inner_type),
8857 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8858 : int_size_in_bytes (inner_type),
8859 1, build_qualified_type (inner_type,
8860 (TYPE_QUALS (inner_type)
8861 | TYPE_QUAL_CONST)));
8863 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8864 (modifier == EXPAND_STACK_PARM
8865 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8867 op0 = new;
8870 op0 = force_operand (XEXP (op0, 0), target);
8873 if (flag_force_addr
8874 && GET_CODE (op0) != REG
8875 && modifier != EXPAND_CONST_ADDRESS
8876 && modifier != EXPAND_INITIALIZER
8877 && modifier != EXPAND_SUM)
8878 op0 = force_reg (Pmode, op0);
8880 if (GET_CODE (op0) == REG
8881 && ! REG_USERVAR_P (op0))
8882 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8884 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8885 op0 = convert_memory_address (ptr_mode, op0);
8887 return op0;
8889 case ENTRY_VALUE_EXPR:
8890 abort ();
8892 /* COMPLEX type for Extended Pascal & Fortran */
8893 case COMPLEX_EXPR:
8895 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8896 rtx insns;
8898 /* Get the RTL for the operands. */
8899 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8900 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8902 if (! target)
8903 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8905 start_sequence ();
8907 /* Move the real (op0) and imaginary (op1) parts to their location. */
8908 emit_move_insn (gen_realpart (mode, target), op0);
8909 emit_move_insn (gen_imagpart (mode, target), op1);
8911 insns = get_insns ();
8912 end_sequence ();
8914 /* Complex construction should appear as a single unit. */
8915 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8916 each with a separate pseudo as destination.
8917 It's not correct for flow to treat them as a unit. */
8918 if (GET_CODE (target) != CONCAT)
8919 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8920 else
8921 emit_insn (insns);
8923 return target;
8926 case REALPART_EXPR:
8927 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8928 return gen_realpart (mode, op0);
8930 case IMAGPART_EXPR:
8931 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8932 return gen_imagpart (mode, op0);
8934 case CONJ_EXPR:
8936 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8937 rtx imag_t;
8938 rtx insns;
8940 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8942 if (! target)
8943 target = gen_reg_rtx (mode);
8945 start_sequence ();
8947 /* Store the realpart and the negated imagpart to target. */
8948 emit_move_insn (gen_realpart (partmode, target),
8949 gen_realpart (partmode, op0));
8951 imag_t = gen_imagpart (partmode, target);
8952 temp = expand_unop (partmode,
8953 ! unsignedp && flag_trapv
8954 && (GET_MODE_CLASS(partmode) == MODE_INT)
8955 ? negv_optab : neg_optab,
8956 gen_imagpart (partmode, op0), imag_t, 0);
8957 if (temp != imag_t)
8958 emit_move_insn (imag_t, temp);
8960 insns = get_insns ();
8961 end_sequence ();
8963 /* Conjugate should appear as a single unit.
8964 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8965 each with a separate pseudo as destination.
8966 It's not correct for flow to treat them as a unit. */
8967 if (GET_CODE (target) != CONCAT)
8968 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8969 else
8970 emit_insn (insns);
8972 return target;
8975 case TRY_CATCH_EXPR:
8977 tree handler = TREE_OPERAND (exp, 1);
8979 expand_eh_region_start ();
8981 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8983 expand_eh_region_end_cleanup (handler);
8985 return op0;
8988 case TRY_FINALLY_EXPR:
8990 tree try_block = TREE_OPERAND (exp, 0);
8991 tree finally_block = TREE_OPERAND (exp, 1);
8993 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8995 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8996 is not sufficient, so we cannot expand the block twice.
8997 So we play games with GOTO_SUBROUTINE_EXPR to let us
8998 expand the thing only once. */
8999 /* When not optimizing, we go ahead with this form since
9000 (1) user breakpoints operate more predictably without
9001 code duplication, and
9002 (2) we're not running any of the global optimizers
9003 that would explode in time/space with the highly
9004 connected CFG created by the indirect branching. */
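/* A rough sketch of the code emitted on this path:
       <try body, whose cleanup does GOTO_SUBROUTINE_EXPR>
       goto done;
     finally:
       <finally body>
       jump *return_link;
     done:
   so FINALLY_BLOCK is expanded exactly once, and the cleanup reaches it
   by filling RETURN_LINK and jumping to FINALLY_LABEL. */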
9006 rtx finally_label = gen_label_rtx ();
9007 rtx done_label = gen_label_rtx ();
9008 rtx return_link = gen_reg_rtx (Pmode);
9009 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9010 (tree) finally_label, (tree) return_link);
9011 TREE_SIDE_EFFECTS (cleanup) = 1;
9013 /* Start a new binding layer that will keep track of all cleanup
9014 actions to be performed. */
9015 expand_start_bindings (2);
9016 target_temp_slot_level = temp_slot_level;
9018 expand_decl_cleanup (NULL_TREE, cleanup);
9019 op0 = expand_expr (try_block, target, tmode, modifier);
9021 preserve_temp_slots (op0);
9022 expand_end_bindings (NULL_TREE, 0, 0);
9023 emit_jump (done_label);
9024 emit_label (finally_label);
9025 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9026 emit_indirect_jump (return_link);
9027 emit_label (done_label);
9029 else
9031 expand_start_bindings (2);
9032 target_temp_slot_level = temp_slot_level;
9034 expand_decl_cleanup (NULL_TREE, finally_block);
9035 op0 = expand_expr (try_block, target, tmode, modifier);
9037 preserve_temp_slots (op0);
9038 expand_end_bindings (NULL_TREE, 0, 0);
9041 return op0;
9044 case GOTO_SUBROUTINE_EXPR:
9046 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9047 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9048 rtx return_address = gen_label_rtx ();
9049 emit_move_insn (return_link,
9050 gen_rtx_LABEL_REF (Pmode, return_address));
9051 emit_jump (subr);
9052 emit_label (return_address);
9053 return const0_rtx;
9056 case VA_ARG_EXPR:
9057 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9059 case EXC_PTR_EXPR:
9060 return get_exception_pointer (cfun);
9062 case FDESC_EXPR:
9063 /* Function descriptors are not valid except as
9064 initialization constants, and should not be expanded. */
9065 abort ();
9067 default:
9068 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier,
9069 alt_rtl);
9072 /* Here to do an ordinary binary operator, generating an instruction
9073 from the optab already placed in `this_optab'. */
9074 binop:
9075 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9076 subtarget, &op0, &op1, 0);
9077 binop2:
9078 if (modifier == EXPAND_STACK_PARM)
9079 target = 0;
9080 temp = expand_binop (mode, this_optab, op0, op1, target,
9081 unsignedp, OPTAB_LIB_WIDEN);
9082 if (temp == 0)
9083 abort ();
9084 return temp;
9087 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9088 when applied to the address of EXP produces an address known to be
9089 aligned more than BIGGEST_ALIGNMENT. */
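/* The offset recognized here has the form (-&EXP) & (N - 1), where N is
   a power of 2 larger than BIGGEST_ALIGNMENT; adding such an offset to
   the address of EXP rounds that address up to an N-byte boundary. */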
9091 static int
9092 is_aligning_offset (tree offset, tree exp)
9094 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9095 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9096 || TREE_CODE (offset) == NOP_EXPR
9097 || TREE_CODE (offset) == CONVERT_EXPR
9098 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9099 offset = TREE_OPERAND (offset, 0);
9101 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9102 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9103 if (TREE_CODE (offset) != BIT_AND_EXPR
9104 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9105 || compare_tree_int (TREE_OPERAND (offset, 1),
9106 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9107 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9108 return 0;
9110 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9111 It must be NEGATE_EXPR. Then strip any more conversions. */
9112 offset = TREE_OPERAND (offset, 0);
9113 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9114 || TREE_CODE (offset) == NOP_EXPR
9115 || TREE_CODE (offset) == CONVERT_EXPR)
9116 offset = TREE_OPERAND (offset, 0);
9118 if (TREE_CODE (offset) != NEGATE_EXPR)
9119 return 0;
9121 offset = TREE_OPERAND (offset, 0);
9122 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9123 || TREE_CODE (offset) == NOP_EXPR
9124 || TREE_CODE (offset) == CONVERT_EXPR)
9125 offset = TREE_OPERAND (offset, 0);
9127 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9128 whose type is the same as EXP. */
9129 return (TREE_CODE (offset) == ADDR_EXPR
9130 && (TREE_OPERAND (offset, 0) == exp
9131 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9132 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9133 == TREE_TYPE (exp)))));
9136 /* Return the tree node if ARG corresponds to a string constant, or zero
9137 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9138 in bytes within the string that ARG is accessing. The type of the
9139 offset will be `sizetype'. */
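/* For example, if ARG is the tree for "hello" + 2 (a PLUS_EXPR of the
   string's address and the constant 2), this returns the STRING_CST
   "hello" and sets *PTR_OFFSET to 2. */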
9141 tree
9142 string_constant (tree arg, tree *ptr_offset)
9144 STRIP_NOPS (arg);
9146 if (TREE_CODE (arg) == ADDR_EXPR
9147 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9149 *ptr_offset = size_zero_node;
9150 return TREE_OPERAND (arg, 0);
9152 else if (TREE_CODE (arg) == PLUS_EXPR)
9154 tree arg0 = TREE_OPERAND (arg, 0);
9155 tree arg1 = TREE_OPERAND (arg, 1);
9157 STRIP_NOPS (arg0);
9158 STRIP_NOPS (arg1);
9160 if (TREE_CODE (arg0) == ADDR_EXPR
9161 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9163 *ptr_offset = convert (sizetype, arg1);
9164 return TREE_OPERAND (arg0, 0);
9166 else if (TREE_CODE (arg1) == ADDR_EXPR
9167 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9169 *ptr_offset = convert (sizetype, arg0);
9170 return TREE_OPERAND (arg1, 0);
9174 return 0;
9177 /* Expand code for a post- or pre- increment or decrement
9178 and return the RTX for the result.
9179 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9181 static rtx
9182 expand_increment (tree exp, int post, int ignore)
9184 rtx op0, op1;
9185 rtx temp, value;
9186 tree incremented = TREE_OPERAND (exp, 0);
9187 optab this_optab = add_optab;
9188 int icode;
9189 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9190 int op0_is_copy = 0;
9191 int single_insn = 0;
9192 /* 1 means we can't store into OP0 directly,
9193 because it is a subreg narrower than a word,
9194 and we don't dare clobber the rest of the word. */
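/* For example, OP0 might be (subreg:QI (reg:SI N) 0); storing an
   incremented QImode value through it could clobber the other bytes
   of the SImode register. */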
9195 int bad_subreg = 0;
9197 /* Stabilize any component ref that might need to be
9198 evaluated more than once below. */
9199 if (!post
9200 || TREE_CODE (incremented) == BIT_FIELD_REF
9201 || (TREE_CODE (incremented) == COMPONENT_REF
9202 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9203 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9204 incremented = stabilize_reference (incremented);
9205 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9206 ones into save exprs so that they don't accidentally get evaluated
9207 more than once by the code below. */
9208 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9209 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9210 incremented = save_expr (incremented);
9212 /* Compute the operands as RTX.
9213 Note whether OP0 is the actual lvalue or a copy of it:
9214 I believe it is a copy iff it is a register or subreg
9215 and insns were generated in computing it. */
9217 temp = get_last_insn ();
9218 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9220 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9221 in place but instead must do sign- or zero-extension during assignment,
9222 so we copy it into a new register and let the code below use it as
9223 a copy.
9225 Note that we can safely modify this SUBREG since it is known not to be
9226 shared (it was made by the expand_expr call above). */
9228 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9230 if (post)
9231 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9232 else
9233 bad_subreg = 1;
9235 else if (GET_CODE (op0) == SUBREG
9236 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9238 /* We cannot increment this SUBREG in place. If we are
9239 post-incrementing, get a copy of the old value. Otherwise,
9240 just mark that we cannot increment in place. */
9241 if (post)
9242 op0 = copy_to_reg (op0);
9243 else
9244 bad_subreg = 1;
9247 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9248 && temp != get_last_insn ());
9249 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9251 /* Decide whether incrementing or decrementing. */
9252 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9253 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9254 this_optab = sub_optab;
9256 /* Convert decrement by a constant into a negative increment. */
9257 if (this_optab == sub_optab
9258 && GET_CODE (op1) == CONST_INT)
9260 op1 = GEN_INT (-INTVAL (op1));
9261 this_optab = add_optab;
9264 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9265 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9267 /* For a preincrement, see if we can do this with a single instruction. */
9268 if (!post)
9270 icode = (int) this_optab->handlers[(int) mode].insn_code;
9271 if (icode != (int) CODE_FOR_nothing
9272 /* Make sure that OP0 is valid for operands 0 and 1
9273 of the insn we want to queue. */
9274 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9275 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9276 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9277 single_insn = 1;
9280 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9281 then we cannot just increment OP0. We must therefore contrive to
9282 increment the original value. Then, for postincrement, we can return
9283 OP0 since it is a copy of the old value. For preincrement, expand here
9284 unless we can do it with a single insn.
9286 Likewise if storing directly into OP0 would clobber high bits
9287 we need to preserve (bad_subreg). */
9288 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9290 /* This is the easiest way to increment the value wherever it is.
9291 Problems with multiple evaluation of INCREMENTED are prevented
9292 because either (1) it is a component_ref or preincrement,
9293 in which case it was stabilized above, or (2) it is an array_ref
9294 with constant index in an array in a register, which is
9295 safe to reevaluate. */
9296 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9297 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9298 ? MINUS_EXPR : PLUS_EXPR),
9299 TREE_TYPE (exp),
9300 incremented,
9301 TREE_OPERAND (exp, 1));
9303 while (TREE_CODE (incremented) == NOP_EXPR
9304 || TREE_CODE (incremented) == CONVERT_EXPR)
9306 newexp = convert (TREE_TYPE (incremented), newexp);
9307 incremented = TREE_OPERAND (incremented, 0);
9310 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9311 return post ? op0 : temp;
9314 if (post)
9316 /* We have a true reference to the value in OP0.
9317 If there is an insn to add or subtract in this mode, queue it.
9318 Queuing the increment insn avoids the register shuffling
9319 that often results if we must increment now and first save
9320 the old value for subsequent use. */
9322 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9323 op0 = stabilize (op0);
9324 #endif
9326 icode = (int) this_optab->handlers[(int) mode].insn_code;
9327 if (icode != (int) CODE_FOR_nothing
9328 /* Make sure that OP0 is valid for operands 0 and 1
9329 of the insn we want to queue. */
9330 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9331 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9333 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9334 op1 = force_reg (mode, op1);
9336 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9338 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9340 rtx addr = (general_operand (XEXP (op0, 0), mode)
9341 ? force_reg (Pmode, XEXP (op0, 0))
9342 : copy_to_reg (XEXP (op0, 0)));
9343 rtx temp, result;
9345 op0 = replace_equiv_address (op0, addr);
9346 temp = force_reg (GET_MODE (op0), op0);
9347 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9348 op1 = force_reg (mode, op1);
9350 /* The increment queue is LIFO, thus we have to `queue'
9351 the instructions in reverse order. */
9352 enqueue_insn (op0, gen_move_insn (op0, temp));
9353 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9354 return result;
9358 /* Preincrement, or we can't increment with one simple insn. */
9359 if (post)
9360 /* Save a copy of the value before inc or dec, to return it later. */
9361 temp = value = copy_to_reg (op0);
9362 else
9363 /* Arrange to return the incremented value. */
9364 /* Copy the rtx because expand_binop will protect from the queue,
9365 and the results of that would be invalid for us to return
9366 if our caller does emit_queue before using our result. */
9367 temp = copy_rtx (value = op0);
9369 /* Increment however we can. */
9370 op1 = expand_binop (mode, this_optab, value, op1, op0,
9371 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9373 /* Make sure the value is stored into OP0. */
9374 if (op1 != op0)
9375 emit_move_insn (op0, op1);
9377 return temp;
9380 /* Generate code to calculate EXP using a store-flag instruction
9381 and return an rtx for the result. EXP is either a comparison
9382 or a TRUTH_NOT_EXPR whose operand is a comparison.
9384 If TARGET is nonzero, store the result there if convenient.
9386 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9387 cheap.
9389 Return zero if there is no suitable set-flag instruction
9390 available on this machine.
9392 Once expand_expr has been called on the arguments of the comparison,
9393 we are committed to doing the store flag, since it is not safe to
9394 re-evaluate the expression. We emit the store-flag insn by calling
9395 emit_store_flag, but only expand the arguments if we have a reason
9396 to believe that emit_store_flag will be successful. If we think that
9397 it will, but it isn't, we have to simulate the store-flag with a
9398 set/jump/set sequence. */
9400 static rtx
9401 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9403 enum rtx_code code;
9404 tree arg0, arg1, type;
9405 tree tem;
9406 enum machine_mode operand_mode;
9407 int invert = 0;
9408 int unsignedp;
9409 rtx op0, op1;
9410 enum insn_code icode;
9411 rtx subtarget = target;
9412 rtx result, label;
9414 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9415 result at the end. We can't simply invert the test since it would
9416 have already been inverted if it were valid. This case occurs for
9417 some floating-point comparisons. */
9419 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9420 invert = 1, exp = TREE_OPERAND (exp, 0);
9422 arg0 = TREE_OPERAND (exp, 0);
9423 arg1 = TREE_OPERAND (exp, 1);
9425 /* Don't crash if the comparison was erroneous. */
9426 if (arg0 == error_mark_node || arg1 == error_mark_node)
9427 return const0_rtx;
9429 type = TREE_TYPE (arg0);
9430 operand_mode = TYPE_MODE (type);
9431 unsignedp = TREE_UNSIGNED (type);
9433 /* We won't bother with BLKmode store-flag operations because it would mean
9434 passing a lot of information to emit_store_flag. */
9435 if (operand_mode == BLKmode)
9436 return 0;
9438 /* We won't bother with store-flag operations involving function pointers
9439 when function pointers must be canonicalized before comparisons. */
9440 #ifdef HAVE_canonicalize_funcptr_for_compare
9441 if (HAVE_canonicalize_funcptr_for_compare
9442 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9443 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9444 == FUNCTION_TYPE))
9445 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9446 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9447 == FUNCTION_TYPE))))
9448 return 0;
9449 #endif
9451 STRIP_NOPS (arg0);
9452 STRIP_NOPS (arg1);
9454 /* Get the rtx comparison code to use. We know that EXP is a comparison
9455 operation of some type. Some comparisons against 1 and -1 can be
9456 converted to comparisons with zero. Do so here so that the tests
9457 below will be aware that we have a comparison with zero. These
9458 tests will not catch constants in the first operand, but constants
9459 are rarely passed as the first operand. */
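/* For example, a signed test x < 1 becomes x <= 0 here, and a signed
   test x > -1 becomes x >= 0. */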
9461 switch (TREE_CODE (exp))
9463 case EQ_EXPR:
9464 code = EQ;
9465 break;
9466 case NE_EXPR:
9467 code = NE;
9468 break;
9469 case LT_EXPR:
9470 if (integer_onep (arg1))
9471 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9472 else
9473 code = unsignedp ? LTU : LT;
9474 break;
9475 case LE_EXPR:
9476 if (! unsignedp && integer_all_onesp (arg1))
9477 arg1 = integer_zero_node, code = LT;
9478 else
9479 code = unsignedp ? LEU : LE;
9480 break;
9481 case GT_EXPR:
9482 if (! unsignedp && integer_all_onesp (arg1))
9483 arg1 = integer_zero_node, code = GE;
9484 else
9485 code = unsignedp ? GTU : GT;
9486 break;
9487 case GE_EXPR:
9488 if (integer_onep (arg1))
9489 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9490 else
9491 code = unsignedp ? GEU : GE;
9492 break;
9494 case UNORDERED_EXPR:
9495 code = UNORDERED;
9496 break;
9497 case ORDERED_EXPR:
9498 code = ORDERED;
9499 break;
9500 case UNLT_EXPR:
9501 code = UNLT;
9502 break;
9503 case UNLE_EXPR:
9504 code = UNLE;
9505 break;
9506 case UNGT_EXPR:
9507 code = UNGT;
9508 break;
9509 case UNGE_EXPR:
9510 code = UNGE;
9511 break;
9512 case UNEQ_EXPR:
9513 code = UNEQ;
9514 break;
9516 default:
9517 abort ();
9520 /* Put a constant second. */
9521 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9523 tem = arg0; arg0 = arg1; arg1 = tem;
9524 code = swap_condition (code);
9527 /* If this is an equality or inequality test of a single bit, we can
9528 do this by shifting the bit being tested to the low-order bit and
9529 masking the result with the constant 1. If the condition was EQ,
9530 we xor it with 1. This does not require an scc insn and is faster
9531 than an scc insn even if we have it.
9533 The code to make this transformation was moved into fold_single_bit_test,
9534 so we just call into the folder and expand its result. */
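/* For example, (x & 8) != 0 is expanded roughly as (x >> 3) & 1, and
   (x & 8) == 0 as ((x >> 3) & 1) ^ 1. */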
9536 if ((code == NE || code == EQ)
9537 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9538 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9540 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9541 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9542 arg0, arg1, type),
9543 target, VOIDmode, EXPAND_NORMAL);
9546 /* Now see if we are likely to be able to do this. Return if not. */
9547 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9548 return 0;
9550 icode = setcc_gen_code[(int) code];
9551 if (icode == CODE_FOR_nothing
9552 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9554 /* We can only do this if it is one of the special cases that
9555 can be handled without an scc insn. */
9556 if ((code == LT && integer_zerop (arg1))
9557 || (! only_cheap && code == GE && integer_zerop (arg1)))
9559 else if (BRANCH_COST >= 0
9560 && ! only_cheap && (code == NE || code == EQ)
9561 && TREE_CODE (type) != REAL_TYPE
9562 && ((abs_optab->handlers[(int) operand_mode].insn_code
9563 != CODE_FOR_nothing)
9564 || (ffs_optab->handlers[(int) operand_mode].insn_code
9565 != CODE_FOR_nothing)))
9567 else
9568 return 0;
9571 if (! get_subtarget (target)
9572 || GET_MODE (subtarget) != operand_mode)
9573 subtarget = 0;
9575 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9577 if (target == 0)
9578 target = gen_reg_rtx (mode);
9580 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9581 because, if the emit_store_flag does anything it will succeed and
9582 OP0 and OP1 will not be used subsequently. */
9584 result = emit_store_flag (target, code,
9585 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9586 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9587 operand_mode, unsignedp, 1);
9589 if (result)
9591 if (invert)
9592 result = expand_binop (mode, xor_optab, result, const1_rtx,
9593 result, 0, OPTAB_LIB_WIDEN);
9594 return result;
9597 /* If this failed, we have to do this with set/compare/jump/set code. */
9598 if (GET_CODE (target) != REG
9599 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9600 target = gen_reg_rtx (GET_MODE (target));
9602 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9603 result = compare_from_rtx (op0, op1, code, unsignedp,
9604 operand_mode, NULL_RTX);
9605 if (GET_CODE (result) == CONST_INT)
9606 return (((result == const0_rtx && ! invert)
9607 || (result != const0_rtx && invert))
9608 ? const0_rtx : const1_rtx);
9610 /* The code of RESULT may not match CODE if compare_from_rtx
9611 decided to swap its operands and reverse the original code.
9613 We know that compare_from_rtx returns either a CONST_INT or
9614 a new comparison code, so it is safe to just extract the
9615 code from RESULT. */
9616 code = GET_CODE (result);
9618 label = gen_label_rtx ();
9619 if (bcc_gen_fctn[(int) code] == 0)
9620 abort ();
9622 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9623 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9624 emit_label (label);
9626 return target;
9630 /* Stubs in case we haven't got a casesi insn. */
9631 #ifndef HAVE_casesi
9632 # define HAVE_casesi 0
9633 # define gen_casesi(a, b, c, d, e) (0)
9634 # define CODE_FOR_casesi CODE_FOR_nothing
9635 #endif
9637 /* If the machine does not have a case insn that compares the bounds,
9638 this means extra overhead for dispatch tables, which raises the
9639 threshold for using them. */
9640 #ifndef CASE_VALUES_THRESHOLD
9641 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9642 #endif /* CASE_VALUES_THRESHOLD */
9644 unsigned int
9645 case_values_threshold (void)
9647 return CASE_VALUES_THRESHOLD;
9650 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9651 0 otherwise (i.e. if there is no casesi instruction). */
9652 int
9653 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9654 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9656 enum machine_mode index_mode = SImode;
9657 int index_bits = GET_MODE_BITSIZE (index_mode);
9658 rtx op1, op2, index;
9659 enum machine_mode op_mode;
9661 if (! HAVE_casesi)
9662 return 0;
9664 /* Convert the index to SImode. */
9665 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9667 enum machine_mode omode = TYPE_MODE (index_type);
9668 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9670 /* We must handle the endpoints in the original mode. */
9671 index_expr = build (MINUS_EXPR, index_type,
9672 index_expr, minval);
9673 minval = integer_zero_node;
9674 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9675 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9676 omode, 1, default_label);
9677 /* Now we can safely truncate. */
9678 index = convert_to_mode (index_mode, index, 0);
9680 else
9682 if (TYPE_MODE (index_type) != index_mode)
9684 index_expr = convert ((*lang_hooks.types.type_for_size)
9685 (index_bits, 0), index_expr);
9686 index_type = TREE_TYPE (index_expr);
9689 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9691 emit_queue ();
9692 index = protect_from_queue (index, 0);
9693 do_pending_stack_adjust ();
9695 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9696 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9697 (index, op_mode))
9698 index = copy_to_mode_reg (op_mode, index);
9700 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9702 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9703 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9704 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9705 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9706 (op1, op_mode))
9707 op1 = copy_to_mode_reg (op_mode, op1);
9709 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9711 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9712 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9713 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9714 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9715 (op2, op_mode))
9716 op2 = copy_to_mode_reg (op_mode, op2);
9718 emit_jump_insn (gen_casesi (index, op1, op2,
9719 table_label, default_label));
9720 return 1;
9723 /* Attempt to generate a tablejump instruction; same concept. */
9724 #ifndef HAVE_tablejump
9725 #define HAVE_tablejump 0
9726 #define gen_tablejump(x, y) (0)
9727 #endif
9729 /* Subroutine of the next function.
9731 INDEX is the value being switched on, with the lowest value
9732 in the table already subtracted.
9733 MODE is its expected mode (needed if INDEX is constant).
9734 RANGE is the length of the jump table.
9735 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9737 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9738 index value is out of range. */
9740 static void
9741 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9742 rtx default_label)
9744 rtx temp, vector;
9746 if (INTVAL (range) > cfun->max_jumptable_ents)
9747 cfun->max_jumptable_ents = INTVAL (range);
9749 /* Do an unsigned comparison (in the proper mode) between the index
9750 expression and the value which represents the length of the range.
9751 Since we just finished subtracting the lower bound of the range
9752 from the index expression, this comparison allows us to simultaneously
9753 check that the original index expression value is both greater than
9754 or equal to the minimum value of the range and less than or equal to
9755 the maximum value of the range. */
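/* For example, with case values 5 through 10, INDEX holds x - 5 and
   RANGE is 5; the single unsigned test (x - 5) > 5 rejects both x < 5
   (which wraps around to a large unsigned value) and x > 10. */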
9757 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9758 default_label);
9760 /* If index is in range, it must fit in Pmode.
9761 Convert to Pmode so we can index with it. */
9762 if (mode != Pmode)
9763 index = convert_to_mode (Pmode, index, 1);
9765 /* Don't let a MEM slip through, because then INDEX that comes
9766 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9767 and break_out_memory_refs will go to work on it and mess it up. */
9768 #ifdef PIC_CASE_VECTOR_ADDRESS
9769 if (flag_pic && GET_CODE (index) != REG)
9770 index = copy_to_mode_reg (Pmode, index);
9771 #endif
9773 /* If flag_force_addr were to affect this address
9774 it could interfere with the tricky assumptions made
9775 about addresses that contain label-refs,
9776 which may be valid only very near the tablejump itself. */
9777 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9778 GET_MODE_SIZE, because this indicates how large insns are. The other
9779 uses should all be Pmode, because they are addresses. This code
9780 could fail if addresses and insns are not the same size. */
9781 index = gen_rtx_PLUS (Pmode,
9782 gen_rtx_MULT (Pmode, index,
9783 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9784 gen_rtx_LABEL_REF (Pmode, table_label));
9785 #ifdef PIC_CASE_VECTOR_ADDRESS
9786 if (flag_pic)
9787 index = PIC_CASE_VECTOR_ADDRESS (index);
9788 else
9789 #endif
9790 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9791 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9792 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9793 RTX_UNCHANGING_P (vector) = 1;
9794 MEM_NOTRAP_P (vector) = 1;
9795 convert_move (temp, vector, 0);
9797 emit_jump_insn (gen_tablejump (temp, table_label));
9799 /* If we are generating PIC code or if the table is PC-relative, the
9800 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9801 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9802 emit_barrier ();
9805 int
9806 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9807 rtx table_label, rtx default_label)
9809 rtx index;
9811 if (! HAVE_tablejump)
9812 return 0;
9814 index_expr = fold (build (MINUS_EXPR, index_type,
9815 convert (index_type, index_expr),
9816 convert (index_type, minval)));
9817 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9818 emit_queue ();
9819 index = protect_from_queue (index, 0);
9820 do_pending_stack_adjust ();
9822 do_tablejump (index, TYPE_MODE (index_type),
9823 convert_modes (TYPE_MODE (index_type),
9824 TYPE_MODE (TREE_TYPE (range)),
9825 expand_expr (range, NULL_RTX,
9826 VOIDmode, 0),
9827 TREE_UNSIGNED (TREE_TYPE (range))),
9828 table_label, default_label);
9829 return 1;
9832 /* Nonzero if the mode is a valid vector mode for this architecture.
9833 This returns nonzero even if there is no hardware support for the
9834 vector mode, but we can emulate with narrower modes. */
9836 int
9837 vector_mode_valid_p (enum machine_mode mode)
9839 enum mode_class class = GET_MODE_CLASS (mode);
9840 enum machine_mode innermode;
9842 /* Doh! What's going on? */
9843 if (class != MODE_VECTOR_INT
9844 && class != MODE_VECTOR_FLOAT)
9845 return 0;
9847 /* Hardware support. Woo hoo! */
9848 if (VECTOR_MODE_SUPPORTED_P (mode))
9849 return 1;
9851 innermode = GET_MODE_INNER (mode);
9853 /* We should probably return 1 if requesting V4DI and we have no DI
9854 but do have V2DI; that case, however, is very unlikely. */
9856 /* If we have support for the inner mode, we can safely emulate it.
9857 We may not have V2DI, but we can emulate with a pair of DIs. */
9858 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9861 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9862 static rtx
9863 const_vector_from_tree (tree exp)
9865 rtvec v;
9866 int units, i;
9867 tree link, elt;
9868 enum machine_mode inner, mode;
9870 mode = TYPE_MODE (TREE_TYPE (exp));
9872 if (is_zeros_p (exp))
9873 return CONST0_RTX (mode);
9875 units = GET_MODE_NUNITS (mode);
9876 inner = GET_MODE_INNER (mode);
9878 v = rtvec_alloc (units);
9880 link = TREE_VECTOR_CST_ELTS (exp);
9881 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9883 elt = TREE_VALUE (link);
9885 if (TREE_CODE (elt) == REAL_CST)
9886 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9887 inner);
9888 else
9889 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9890 TREE_INT_CST_HIGH (elt),
9891 inner);
9894 /* Initialize remaining elements to 0. */
9895 for (; i < units; ++i)
9896 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9898 return gen_rtx_raw_CONST_VECTOR (mode, v);
9901 #include "gt-expr.h"