1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "target.h"
52 /* Decide whether a function's arguments should be processed
53 from first to last or from last to first.
55 They should be processed from last to first if the stack and args grow
56 in opposite directions, but only if we have push insns.  */
58 #ifdef PUSH_ROUNDING
60 #ifndef PUSH_ARGS_REVERSED
61 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #endif
64 #endif
66 #endif
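/* For example, on a target where the stack grows downward but the
   argument area does not (STACK_GROWS_DOWNWARD defined, ARGS_GROW_DOWNWARD
   not defined), the two defined() tests differ, PUSH_ARGS_REVERSED gets
   defined, and arguments are pushed from last to first.  */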
68 #ifndef STACK_PUSH_CODE
69 #ifdef STACK_GROWS_DOWNWARD
70 #define STACK_PUSH_CODE PRE_DEC
71 #else
72 #define STACK_PUSH_CODE PRE_INC
73 #endif
74 #endif
76 /* Convert defined/undefined to boolean. */
77 #ifdef TARGET_MEM_FUNCTIONS
78 #undef TARGET_MEM_FUNCTIONS
79 #define TARGET_MEM_FUNCTIONS 1
80 #else
81 #define TARGET_MEM_FUNCTIONS 0
82 #endif
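/* This lets later code test TARGET_MEM_FUNCTIONS with an ordinary if or
   ?: expression rather than #ifdef; emit_block_move_via_libcall below, for
   instance, chooses between memcpy and bcopy that way.  */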
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
91 int cse_not_expected;
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
94 tree placeholder_list = 0;
96 /* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98 struct move_by_pieces
99 {
100 rtx to;
101 rtx to_addr;
102 int autinc_to;
103 int explicit_inc_to;
104 rtx from;
105 rtx from_addr;
106 int autinc_from;
107 int explicit_inc_from;
108 unsigned HOST_WIDE_INT len;
109 HOST_WIDE_INT offset;
110 int reverse;
111 };
113 /* This structure is used by store_by_pieces to describe the clear to
114 be performed. */
116 struct store_by_pieces
117 {
118 rtx to;
119 rtx to_addr;
120 int autinc_to;
121 int explicit_inc_to;
122 unsigned HOST_WIDE_INT len;
123 HOST_WIDE_INT offset;
124 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
125 void *constfundata;
126 int reverse;
127 };
129 static rtx enqueue_insn (rtx, rtx);
130 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
131 unsigned int);
132 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
133 struct move_by_pieces *);
134 static bool block_move_libcall_safe_for_call_parm (void);
135 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
136 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
137 static tree emit_block_move_libcall_fn (int);
138 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
139 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
140 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
141 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
142 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
143 struct store_by_pieces *);
144 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
145 static rtx clear_storage_via_libcall (rtx, rtx);
146 static tree clear_storage_libcall_fn (int);
147 static rtx compress_float_constant (rtx, rtx);
148 static rtx get_subtarget (rtx);
149 static int is_zeros_p (tree);
150 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
151 HOST_WIDE_INT, enum machine_mode,
152 tree, tree, int, int);
153 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
154 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
155 tree, enum machine_mode, int, tree, int);
156 static rtx var_rtx (tree);
158 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
159 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
161 static int is_aligning_offset (tree, tree);
162 static rtx expand_increment (tree, int, int);
163 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
164 enum expand_modifier);
165 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
166 #ifdef PUSH_ROUNDING
167 static void emit_single_push_insn (enum machine_mode, rtx, tree);
168 #endif
169 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
170 static rtx const_vector_from_tree (tree);
172 /* Record for each mode whether we can move a register directly to or
173 from an object of that mode in memory. If we can't, we won't try
174 to use that mode directly when accessing a field of that mode. */
176 static char direct_load[NUM_MACHINE_MODES];
177 static char direct_store[NUM_MACHINE_MODES];
179 /* Record for each mode whether we can float-extend from memory. */
181 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
183 /* This macro is used to determine whether move_by_pieces should be called
184 to perform a structure copy. */
185 #ifndef MOVE_BY_PIECES_P
186 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
188 #endif
190 /* This macro is used to determine whether clear_by_pieces should be
191 called to clear storage. */
192 #ifndef CLEAR_BY_PIECES_P
193 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
195 #endif
197 /* This macro is used to determine whether store_by_pieces should be
198 called to "memset" storage with byte values other than zero, or
199 to "memcpy" storage when the source is a constant string. */
200 #ifndef STORE_BY_PIECES_P
201 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
202 #endif
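/* All three predicates share the insn count computed by
   move_by_pieces_ninsns; they differ only in the ratio it is compared
   against, so a target may, for example, tolerate more inline insns for
   clearing (CLEAR_RATIO) than for copying (MOVE_RATIO).  */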
204 /* This array records the insn_code of insns to perform block moves. */
205 enum insn_code movstr_optab[NUM_MACHINE_MODES];
207 /* This array records the insn_code of insns to perform block clears. */
208 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
210 /* These arrays record the insn_code of two different kinds of insns
211 to perform block compares. */
212 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
213 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
215 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
216 struct file_stack *expr_wfl_stack;
218 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
220 #ifndef SLOW_UNALIGNED_ACCESS
221 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
222 #endif
224 /* This is run once per compilation to set up which modes can be used
225 directly in memory and to initialize the block move optab. */
227 void
228 init_expr_once (void)
230 rtx insn, pat;
231 enum machine_mode mode;
232 int num_clobbers;
233 rtx mem, mem1;
234 rtx reg;
236 /* Try indexing by frame ptr and try by stack ptr.
237 It is known that on the Convex the stack ptr isn't a valid index.
238 With luck, one or the other is valid on any machine. */
239 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
240 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
242 /* A scratch register we can modify in-place below to avoid
243 useless RTL allocations. */
244 reg = gen_rtx_REG (VOIDmode, -1);
246 insn = rtx_alloc (INSN);
247 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
248 PATTERN (insn) = pat;
250 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
251 mode = (enum machine_mode) ((int) mode + 1))
253 int regno;
255 direct_load[(int) mode] = direct_store[(int) mode] = 0;
256 PUT_MODE (mem, mode);
257 PUT_MODE (mem1, mode);
258 PUT_MODE (reg, mode);
260 /* See if there is some register that can be used in this mode and
261 directly loaded or stored from memory. */
263 if (mode != VOIDmode && mode != BLKmode)
264 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
265 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
266 regno++)
268 if (! HARD_REGNO_MODE_OK (regno, mode))
269 continue;
271 REGNO (reg) = regno;
273 SET_SRC (pat) = mem;
274 SET_DEST (pat) = reg;
275 if (recog (pat, insn, &num_clobbers) >= 0)
276 direct_load[(int) mode] = 1;
278 SET_SRC (pat) = mem1;
279 SET_DEST (pat) = reg;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_load[(int) mode] = 1;
283 SET_SRC (pat) = reg;
284 SET_DEST (pat) = mem;
285 if (recog (pat, insn, &num_clobbers) >= 0)
286 direct_store[(int) mode] = 1;
288 SET_SRC (pat) = reg;
289 SET_DEST (pat) = mem1;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_store[(int) mode] = 1;
295 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
297 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
298 mode = GET_MODE_WIDER_MODE (mode))
300 enum machine_mode srcmode;
301 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
302 srcmode = GET_MODE_WIDER_MODE (srcmode))
304 enum insn_code ic;
306 ic = can_extend_p (mode, srcmode, 0);
307 if (ic == CODE_FOR_nothing)
308 continue;
310 PUT_MODE (mem, srcmode);
312 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
313 float_extend_from_mem[mode][srcmode] = true;
318 /* This is run at the start of compiling a function. */
320 void
321 init_expr (void)
323 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
326 /* Small sanity check that the queue is empty at the end of a function. */
328 void
329 finish_expr_for_function (void)
331 if (pending_chain)
332 abort ();
335 /* Manage the queue of increment instructions to be output
336 for POSTINCREMENT_EXPR expressions, etc. */
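/* For example, expanding a POSTINCREMENT_EXPR such as v++ enqueues the
   increment rather than emitting it immediately; any later use of v must
   then go through protect_from_queue so that it sees either v itself (if
   the increment has not been emitted yet) or a saved copy of the old
   value.  */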
338 /* Queue up to increment (or change) VAR later. BODY says how:
339 BODY should be the same thing you would pass to emit_insn
340 to increment right away. It will go to emit_insn later on.
342 The value is a QUEUED expression to be used in place of VAR
343 where you want to guarantee the pre-incrementation value of VAR. */
345 static rtx
346 enqueue_insn (rtx var, rtx body)
348 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
349 body, pending_chain);
350 return pending_chain;
353 /* Use protect_from_queue to convert a QUEUED expression
354 into something that you can put immediately into an instruction.
355 If the queued incrementation has not happened yet,
356 protect_from_queue returns the variable itself.
357 If the incrementation has happened, protect_from_queue returns a temp
358 that contains a copy of the old value of the variable.
360 Any time an rtx which might possibly be a QUEUED is to be put
361 into an instruction, it must be passed through protect_from_queue first.
362 QUEUED expressions are not meaningful in instructions.
364 Do not pass a value through protect_from_queue and then hold
365 on to it for a while before putting it in an instruction!
366 If the queue is flushed in between, incorrect code will result. */
368 rtx
369 protect_from_queue (rtx x, int modify)
371 RTX_CODE code = GET_CODE (x);
373 #if 0 /* A QUEUED can hang around after the queue is forced out. */
374 /* Shortcut for most common case. */
375 if (pending_chain == 0)
376 return x;
377 #endif
379 if (code != QUEUED)
381 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
382 use of autoincrement. Make a copy of the contents of the memory
383 location rather than a copy of the address, but not if the value is
384 of mode BLKmode. Don't modify X in place since it might be
385 shared. */
386 if (code == MEM && GET_MODE (x) != BLKmode
387 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
389 rtx y = XEXP (x, 0);
390 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
392 if (QUEUED_INSN (y))
394 rtx temp = gen_reg_rtx (GET_MODE (x));
396 emit_insn_before (gen_move_insn (temp, new),
397 QUEUED_INSN (y));
398 return temp;
401 /* Copy the address into a pseudo, so that the returned value
402 remains correct across calls to emit_queue. */
403 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
406 /* Otherwise, recursively protect the subexpressions of all
407 the kinds of rtx's that can contain a QUEUED. */
408 if (code == MEM)
410 rtx tem = protect_from_queue (XEXP (x, 0), 0);
411 if (tem != XEXP (x, 0))
413 x = copy_rtx (x);
414 XEXP (x, 0) = tem;
417 else if (code == PLUS || code == MULT)
419 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
420 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
421 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
423 x = copy_rtx (x);
424 XEXP (x, 0) = new0;
425 XEXP (x, 1) = new1;
428 return x;
430 /* If the increment has not happened, use the variable itself. Copy it
431 into a new pseudo so that the value remains correct across calls to
432 emit_queue. */
433 if (QUEUED_INSN (x) == 0)
434 return copy_to_reg (QUEUED_VAR (x));
435 /* If the increment has happened and a pre-increment copy exists,
436 use that copy. */
437 if (QUEUED_COPY (x) != 0)
438 return QUEUED_COPY (x);
439 /* The increment has happened but we haven't set up a pre-increment copy.
440 Set one up now, and use it. */
441 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
442 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
443 QUEUED_INSN (x));
444 return QUEUED_COPY (x);
447 /* Return nonzero if X contains a QUEUED expression:
448 if it contains anything that will be altered by a queued increment.
449 We handle only combinations of MEM, PLUS, MINUS and MULT operators
450 since memory addresses generally contain only those. */
452 int
453 queued_subexp_p (rtx x)
455 enum rtx_code code = GET_CODE (x);
456 switch (code)
458 case QUEUED:
459 return 1;
460 case MEM:
461 return queued_subexp_p (XEXP (x, 0));
462 case MULT:
463 case PLUS:
464 case MINUS:
465 return (queued_subexp_p (XEXP (x, 0))
466 || queued_subexp_p (XEXP (x, 1)));
467 default:
468 return 0;
472 /* Perform all the pending incrementations. */
474 void
475 emit_queue (void)
477 rtx p;
478 while ((p = pending_chain))
480 rtx body = QUEUED_BODY (p);
482 switch (GET_CODE (body))
484 case INSN:
485 case JUMP_INSN:
486 case CALL_INSN:
487 case CODE_LABEL:
488 case BARRIER:
489 case NOTE:
490 QUEUED_INSN (p) = body;
491 emit_insn (body);
492 break;
494 #ifdef ENABLE_CHECKING
495 case SEQUENCE:
496 abort ();
497 break;
498 #endif
500 default:
501 QUEUED_INSN (p) = emit_insn (body);
502 break;
505 pending_chain = QUEUED_NEXT (p);
509 /* Copy data from FROM to TO, where the machine modes are not the same.
510 Both modes may be integer, or both may be floating.
511 UNSIGNEDP should be nonzero if FROM is an unsigned type.
512 This causes zero-extension instead of sign-extension. */
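/* For example, widening a QImode value to SImode emits a zero_extend when
   UNSIGNEDP is nonzero and a sign_extend otherwise; if no suitable extend
   pattern exists, the code below falls back to an intermediate mode or to
   a pair of shifts.  */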
514 void
515 convert_move (rtx to, rtx from, int unsignedp)
517 enum machine_mode to_mode = GET_MODE (to);
518 enum machine_mode from_mode = GET_MODE (from);
519 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
520 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
521 enum insn_code code;
522 rtx libcall;
524 /* rtx code for making an equivalent value. */
525 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
526 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
528 to = protect_from_queue (to, 1);
529 from = protect_from_queue (from, 0);
531 if (to_real != from_real)
532 abort ();
534 /* If FROM is a SUBREG that indicates that we have already done at least
535 the required extension, strip it. We don't handle such SUBREGs as
536 TO here. */
538 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
539 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
540 >= GET_MODE_SIZE (to_mode))
541 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
542 from = gen_lowpart (to_mode, from), from_mode = to_mode;
544 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
545 abort ();
547 if (to_mode == from_mode
548 || (from_mode == VOIDmode && CONSTANT_P (from)))
550 emit_move_insn (to, from);
551 return;
554 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
556 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
557 abort ();
559 if (VECTOR_MODE_P (to_mode))
560 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
561 else
562 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
564 emit_move_insn (to, from);
565 return;
568 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
570 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
571 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
572 return;
575 if (to_real)
577 rtx value, insns;
578 convert_optab tab;
580 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
581 tab = sext_optab;
582 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
583 tab = trunc_optab;
584 else
585 abort ();
587 /* Try converting directly if the insn is supported. */
589 code = tab->handlers[to_mode][from_mode].insn_code;
590 if (code != CODE_FOR_nothing)
592 emit_unop_insn (code, to, from,
593 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
594 return;
597 /* Otherwise use a libcall. */
598 libcall = tab->handlers[to_mode][from_mode].libfunc;
600 if (!libcall)
601 /* This conversion is not implemented yet. */
602 abort ();
604 start_sequence ();
605 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
606 1, from, from_mode);
607 insns = get_insns ();
608 end_sequence ();
609 emit_libcall_block (insns, to, value,
610 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
611 from)
612 : gen_rtx_FLOAT_EXTEND (to_mode, from));
613 return;
616 /* Handle pointer conversion. */ /* SPEE 900220. */
617 /* Targets are expected to provide conversion insns between PxImode and
618 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
619 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
621 enum machine_mode full_mode
622 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
624 if (trunc_optab->handlers[to_mode][full_mode].insn_code
625 == CODE_FOR_nothing)
626 abort ();
628 if (full_mode != from_mode)
629 from = convert_to_mode (full_mode, from, unsignedp);
630 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
631 to, from, UNKNOWN);
632 return;
634 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
636 enum machine_mode full_mode
637 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
639 if (sext_optab->handlers[full_mode][from_mode].insn_code
640 == CODE_FOR_nothing)
641 abort ();
643 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
644 to, from, UNKNOWN);
645 if (to_mode == full_mode)
646 return;
648 /* else proceed to integer conversions below. */
649 from_mode = full_mode;
652 /* Now both modes are integers. */
654 /* Handle expanding beyond a word. */
655 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
656 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
658 rtx insns;
659 rtx lowpart;
660 rtx fill_value;
661 rtx lowfrom;
662 int i;
663 enum machine_mode lowpart_mode;
664 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
666 /* Try converting directly if the insn is supported. */
667 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
668 != CODE_FOR_nothing)
670 /* If FROM is a SUBREG, put it into a register. Do this
671 so that we always generate the same set of insns for
672 better cse'ing; if an intermediate assignment occurred,
673 we won't be doing the operation directly on the SUBREG. */
674 if (optimize > 0 && GET_CODE (from) == SUBREG)
675 from = force_reg (from_mode, from);
676 emit_unop_insn (code, to, from, equiv_code);
677 return;
679 /* Next, try converting via full word. */
680 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
681 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
682 != CODE_FOR_nothing))
684 if (GET_CODE (to) == REG)
686 if (reg_overlap_mentioned_p (to, from))
687 from = force_reg (from_mode, from);
688 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
690 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
691 emit_unop_insn (code, to,
692 gen_lowpart (word_mode, to), equiv_code);
693 return;
696 /* No special multiword conversion insn; do it by hand. */
697 start_sequence ();
699 /* Since we will turn this into a no conflict block, we must ensure
700 that the source does not overlap the target. */
702 if (reg_overlap_mentioned_p (to, from))
703 from = force_reg (from_mode, from);
705 /* Get a copy of FROM widened to a word, if necessary. */
706 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
707 lowpart_mode = word_mode;
708 else
709 lowpart_mode = from_mode;
711 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
713 lowpart = gen_lowpart (lowpart_mode, to);
714 emit_move_insn (lowpart, lowfrom);
716 /* Compute the value to put in each remaining word. */
717 if (unsignedp)
718 fill_value = const0_rtx;
719 else
721 #ifdef HAVE_slt
722 if (HAVE_slt
723 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
724 && STORE_FLAG_VALUE == -1)
726 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
727 lowpart_mode, 0);
728 fill_value = gen_reg_rtx (word_mode);
729 emit_insn (gen_slt (fill_value));
731 else
732 #endif
734 fill_value
735 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
736 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
737 NULL_RTX, 0);
738 fill_value = convert_to_mode (word_mode, fill_value, 1);
742 /* Fill the remaining words. */
743 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
745 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
746 rtx subword = operand_subword (to, index, 1, to_mode);
748 if (subword == 0)
749 abort ();
751 if (fill_value != subword)
752 emit_move_insn (subword, fill_value);
755 insns = get_insns ();
756 end_sequence ();
758 emit_no_conflict_block (insns, to, from, NULL_RTX,
759 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
760 return;
763 /* Truncating multi-word to a word or less. */
764 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
765 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
767 if (!((GET_CODE (from) == MEM
768 && ! MEM_VOLATILE_P (from)
769 && direct_load[(int) to_mode]
770 && ! mode_dependent_address_p (XEXP (from, 0)))
771 || GET_CODE (from) == REG
772 || GET_CODE (from) == SUBREG))
773 from = force_reg (from_mode, from);
774 convert_move (to, gen_lowpart (word_mode, from), 0);
775 return;
778 /* Now follow all the conversions between integers
779 no more than a word long. */
781 /* For truncation, usually we can just refer to FROM in a narrower mode. */
782 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
783 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
784 GET_MODE_BITSIZE (from_mode)))
786 if (!((GET_CODE (from) == MEM
787 && ! MEM_VOLATILE_P (from)
788 && direct_load[(int) to_mode]
789 && ! mode_dependent_address_p (XEXP (from, 0)))
790 || GET_CODE (from) == REG
791 || GET_CODE (from) == SUBREG))
792 from = force_reg (from_mode, from);
793 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
794 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
795 from = copy_to_reg (from);
796 emit_move_insn (to, gen_lowpart (to_mode, from));
797 return;
800 /* Handle extension. */
801 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
803 /* Convert directly if that works. */
804 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
805 != CODE_FOR_nothing)
807 if (flag_force_mem)
808 from = force_not_mem (from);
810 emit_unop_insn (code, to, from, equiv_code);
811 return;
813 else
815 enum machine_mode intermediate;
816 rtx tmp;
817 tree shift_amount;
819 /* Search for a mode to convert via. */
820 for (intermediate = from_mode; intermediate != VOIDmode;
821 intermediate = GET_MODE_WIDER_MODE (intermediate))
822 if (((can_extend_p (to_mode, intermediate, unsignedp)
823 != CODE_FOR_nothing)
824 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
825 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
826 GET_MODE_BITSIZE (intermediate))))
827 && (can_extend_p (intermediate, from_mode, unsignedp)
828 != CODE_FOR_nothing))
830 convert_move (to, convert_to_mode (intermediate, from,
831 unsignedp), unsignedp);
832 return;
835 /* No suitable intermediate mode.
836 Generate what we need with shifts. */
837 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
838 - GET_MODE_BITSIZE (from_mode), 0);
839 from = gen_lowpart (to_mode, force_reg (from_mode, from));
840 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
841 to, unsignedp);
842 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
843 to, unsignedp);
844 if (tmp != to)
845 emit_move_insn (to, tmp);
846 return;
850 /* Support special truncate insns for certain modes. */
851 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
853 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
854 to, from, UNKNOWN);
855 return;
858 /* Handle truncation of volatile memrefs, and so on;
859 the things that couldn't be truncated directly,
860 and for which there was no special instruction.
862 ??? Code above formerly short-circuited this, for most integer
863 mode pairs, with a force_reg in from_mode followed by a recursive
864 call to this routine. Appears always to have been wrong. */
865 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
867 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
868 emit_move_insn (to, temp);
869 return;
872 /* Mode combination is not recognized. */
873 abort ();
876 /* Return an rtx for a value that would result
877 from converting X to mode MODE.
878 Both X and MODE may be floating, or both integer.
879 UNSIGNEDP is nonzero if X is an unsigned value.
880 This can be done by referring to a part of X in place
881 or by copying to a new temporary with conversion.
883 This function *must not* call protect_from_queue
884 except when putting X into an insn (in which case convert_move does it). */
886 rtx
887 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
889 return convert_modes (mode, VOIDmode, x, unsignedp);
892 /* Return an rtx for a value that would result
893 from converting X from mode OLDMODE to mode MODE.
894 Both modes may be floating, or both integer.
895 UNSIGNEDP is nonzero if X is an unsigned value.
897 This can be done by referring to a part of X in place
898 or by copying to a new temporary with conversion.
900 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
902 This function *must not* call protect_from_queue
903 except when putting X into an insn (in which case convert_move does it). */
905 rtx
906 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
908 rtx temp;
910 /* If FROM is a SUBREG that indicates that we have already done at least
911 the required extension, strip it. */
913 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
914 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
915 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
916 x = gen_lowpart (mode, x);
918 if (GET_MODE (x) != VOIDmode)
919 oldmode = GET_MODE (x);
921 if (mode == oldmode)
922 return x;
924 /* There is one case that we must handle specially: If we are converting
925 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
926 we are to interpret the constant as unsigned, gen_lowpart will do
927 the wrong thing if the constant appears negative. What we want to do is
928 make the high-order word of the constant zero, not all ones. */
930 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
931 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
932 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
934 HOST_WIDE_INT val = INTVAL (x);
936 if (oldmode != VOIDmode
937 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
939 int width = GET_MODE_BITSIZE (oldmode);
941 /* We need to zero extend VAL. */
942 val &= ((HOST_WIDE_INT) 1 << width) - 1;
945 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
948 /* We can do this with a gen_lowpart if both desired and current modes
949 are integer, and this is either a constant integer, a register, or a
950 non-volatile MEM. Except for the constant case where MODE is no
951 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
953 if ((GET_CODE (x) == CONST_INT
954 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
955 || (GET_MODE_CLASS (mode) == MODE_INT
956 && GET_MODE_CLASS (oldmode) == MODE_INT
957 && (GET_CODE (x) == CONST_DOUBLE
958 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
959 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
960 && direct_load[(int) mode])
961 || (GET_CODE (x) == REG
962 && (! HARD_REGISTER_P (x)
963 || HARD_REGNO_MODE_OK (REGNO (x), mode))
964 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
965 GET_MODE_BITSIZE (GET_MODE (x)))))))))
967 /* ?? If we don't know OLDMODE, we have to assume here that
968 X does not need sign- or zero-extension. This may not be
969 the case, but it's the best we can do. */
970 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
971 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
973 HOST_WIDE_INT val = INTVAL (x);
974 int width = GET_MODE_BITSIZE (oldmode);
976 /* We must sign or zero-extend in this case. Start by
977 zero-extending, then sign extend if we need to. */
978 val &= ((HOST_WIDE_INT) 1 << width) - 1;
979 if (! unsignedp
980 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
981 val |= (HOST_WIDE_INT) (-1) << width;
983 return gen_int_mode (val, mode);
986 return gen_lowpart (mode, x);
989 /* Converting an integer constant into a mode is always equivalent to a
990 subreg operation. */
991 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
993 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
994 abort ();
995 return simplify_gen_subreg (mode, x, oldmode, 0);
998 temp = gen_reg_rtx (mode);
999 convert_move (temp, x, unsignedp);
1000 return temp;
1003 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1004 store efficiently. Due to internal GCC limitations, this is
1005 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1006 for an immediate constant. */
1008 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
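/* For example, on a host where HOST_WIDE_INT is 8 bytes an immediate
   constant can span at most two of them, i.e. 16 bytes, so STORE_MAX_PIECES
   is MOVE_MAX_PIECES capped at 16.  */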
1010 /* Determine whether the LEN bytes can be moved by using several move
1011 instructions. Return nonzero if a call to move_by_pieces should
1012 succeed. */
1014 int
1015 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1016 unsigned int align ATTRIBUTE_UNUSED)
1018 return MOVE_BY_PIECES_P (len, align);
1021 /* Generate several move instructions to copy LEN bytes from block FROM to
1022 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1023 and TO through protect_from_queue before calling.
1025 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1026 used to push FROM to the stack.
1028 ALIGN is maximum stack alignment we can assume.
1030 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1031 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1032 stpcpy. */
1034 rtx
1035 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1036 unsigned int align, int endp)
1038 struct move_by_pieces data;
1039 rtx to_addr, from_addr = XEXP (from, 0);
1040 unsigned int max_size = MOVE_MAX_PIECES + 1;
1041 enum machine_mode mode = VOIDmode, tmode;
1042 enum insn_code icode;
1044 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1046 data.offset = 0;
1047 data.from_addr = from_addr;
1048 if (to)
1050 to_addr = XEXP (to, 0);
1051 data.to = to;
1052 data.autinc_to
1053 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1054 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1055 data.reverse
1056 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1058 else
1060 to_addr = NULL_RTX;
1061 data.to = NULL_RTX;
1062 data.autinc_to = 1;
1063 #ifdef STACK_GROWS_DOWNWARD
1064 data.reverse = 1;
1065 #else
1066 data.reverse = 0;
1067 #endif
1069 data.to_addr = to_addr;
1070 data.from = from;
1071 data.autinc_from
1072 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1073 || GET_CODE (from_addr) == POST_INC
1074 || GET_CODE (from_addr) == POST_DEC);
1076 data.explicit_inc_from = 0;
1077 data.explicit_inc_to = 0;
1078 if (data.reverse) data.offset = len;
1079 data.len = len;
1081 /* If copying requires more than two move insns,
1082 copy addresses to registers (to make displacements shorter)
1083 and use post-increment if available. */
1084 if (!(data.autinc_from && data.autinc_to)
1085 && move_by_pieces_ninsns (len, align) > 2)
1087 /* Find the mode of the largest move... */
1088 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1089 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1090 if (GET_MODE_SIZE (tmode) < max_size)
1091 mode = tmode;
1093 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1095 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1096 data.autinc_from = 1;
1097 data.explicit_inc_from = -1;
1099 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1101 data.from_addr = copy_addr_to_reg (from_addr);
1102 data.autinc_from = 1;
1103 data.explicit_inc_from = 1;
1105 if (!data.autinc_from && CONSTANT_P (from_addr))
1106 data.from_addr = copy_addr_to_reg (from_addr);
1107 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1109 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1110 data.autinc_to = 1;
1111 data.explicit_inc_to = -1;
1113 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1115 data.to_addr = copy_addr_to_reg (to_addr);
1116 data.autinc_to = 1;
1117 data.explicit_inc_to = 1;
1119 if (!data.autinc_to && CONSTANT_P (to_addr))
1120 data.to_addr = copy_addr_to_reg (to_addr);
1123 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1124 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1125 align = MOVE_MAX * BITS_PER_UNIT;
1127 /* First move what we can in the largest integer mode, then go to
1128 successively smaller modes. */
1130 while (max_size > 1)
1132 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1133 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1134 if (GET_MODE_SIZE (tmode) < max_size)
1135 mode = tmode;
1137 if (mode == VOIDmode)
1138 break;
1140 icode = mov_optab->handlers[(int) mode].insn_code;
1141 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1142 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1144 max_size = GET_MODE_SIZE (mode);
1147 /* The code above should have handled everything. */
1148 if (data.len > 0)
1149 abort ();
1151 if (endp)
1153 rtx to1;
1155 if (data.reverse)
1156 abort ();
1157 if (data.autinc_to)
1159 if (endp == 2)
1161 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1162 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1163 else
1164 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1165 -1));
1167 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1168 data.offset);
1170 else
1172 if (endp == 2)
1173 --data.offset;
1174 to1 = adjust_address (data.to, QImode, data.offset);
1176 return to1;
1178 else
1179 return data.to;
1182 /* Return number of insns required to move L bytes by pieces.
1183 ALIGN (in bits) is maximum alignment we can assume. */
1185 static unsigned HOST_WIDE_INT
1186 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1188 unsigned HOST_WIDE_INT n_insns = 0;
1189 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1191 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1192 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1193 align = MOVE_MAX * BITS_PER_UNIT;
1195 while (max_size > 1)
1197 enum machine_mode mode = VOIDmode, tmode;
1198 enum insn_code icode;
1200 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1201 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1202 if (GET_MODE_SIZE (tmode) < max_size)
1203 mode = tmode;
1205 if (mode == VOIDmode)
1206 break;
1208 icode = mov_optab->handlers[(int) mode].insn_code;
1209 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1210 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1212 max_size = GET_MODE_SIZE (mode);
1215 if (l)
1216 abort ();
1217 return n_insns;
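/* A worked example (illustrative, not from the original source): with
   word-aligned operands on a typical 32-bit target, l == 7 is counted as
   one SImode, one HImode and one QImode move, i.e. three insns, which
   MOVE_BY_PIECES_P then compares against MOVE_RATIO.  */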
1220 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1221 with move instructions for mode MODE. GENFUN is the gen_... function
1222 to make a move insn for that mode. DATA has all the other info. */
1224 static void
1225 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1226 struct move_by_pieces *data)
1228 unsigned int size = GET_MODE_SIZE (mode);
1229 rtx to1 = NULL_RTX, from1;
1231 while (data->len >= size)
1233 if (data->reverse)
1234 data->offset -= size;
1236 if (data->to)
1238 if (data->autinc_to)
1239 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1240 data->offset);
1241 else
1242 to1 = adjust_address (data->to, mode, data->offset);
1245 if (data->autinc_from)
1246 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1247 data->offset);
1248 else
1249 from1 = adjust_address (data->from, mode, data->offset);
1251 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1252 emit_insn (gen_add2_insn (data->to_addr,
1253 GEN_INT (-(HOST_WIDE_INT)size)));
1254 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1255 emit_insn (gen_add2_insn (data->from_addr,
1256 GEN_INT (-(HOST_WIDE_INT)size)));
1258 if (data->to)
1259 emit_insn ((*genfun) (to1, from1));
1260 else
1262 #ifdef PUSH_ROUNDING
1263 emit_single_push_insn (mode, from1, NULL);
1264 #else
1265 abort ();
1266 #endif
1269 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1270 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1271 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1272 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1274 if (! data->reverse)
1275 data->offset += size;
1277 data->len -= size;
1281 /* Emit code to move a block Y to a block X. This may be done with
1282 string-move instructions, with multiple scalar move instructions,
1283 or with a library call.
1285 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1286 SIZE is an rtx that says how long they are.
1287 ALIGN is the maximum alignment we can assume they have.
1288 METHOD describes what kind of copy this is, and what mechanisms may be used.
1290 Return the address of the new block, if memcpy is called and returns it,
1291 0 otherwise. */
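/* The strategies below are tried in order: an inline move_by_pieces when
   SIZE is a suitably small constant, a target movstr pattern, a
   memcpy/bcopy libcall when METHOD permits it, and finally an explicit
   byte-copy loop.  */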
1293 rtx
1294 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1296 bool may_use_call;
1297 rtx retval = 0;
1298 unsigned int align;
1300 switch (method)
1302 case BLOCK_OP_NORMAL:
1303 may_use_call = true;
1304 break;
1306 case BLOCK_OP_CALL_PARM:
1307 may_use_call = block_move_libcall_safe_for_call_parm ();
1309 /* Make inhibit_defer_pop nonzero around the library call
1310 to force it to pop the arguments right away. */
1311 NO_DEFER_POP;
1312 break;
1314 case BLOCK_OP_NO_LIBCALL:
1315 may_use_call = false;
1316 break;
1318 default:
1319 abort ();
1322 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1324 if (GET_MODE (x) != BLKmode)
1325 abort ();
1326 if (GET_MODE (y) != BLKmode)
1327 abort ();
1329 x = protect_from_queue (x, 1);
1330 y = protect_from_queue (y, 0);
1331 size = protect_from_queue (size, 0);
1333 if (GET_CODE (x) != MEM)
1334 abort ();
1335 if (GET_CODE (y) != MEM)
1336 abort ();
1337 if (size == 0)
1338 abort ();
1340 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1341 can be incorrect is coming from __builtin_memcpy. */
1342 if (GET_CODE (size) == CONST_INT)
1344 if (INTVAL (size) == 0)
1345 return 0;
1347 x = shallow_copy_rtx (x);
1348 y = shallow_copy_rtx (y);
1349 set_mem_size (x, size);
1350 set_mem_size (y, size);
1353 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1354 move_by_pieces (x, y, INTVAL (size), align, 0);
1355 else if (emit_block_move_via_movstr (x, y, size, align))
1357 else if (may_use_call)
1358 retval = emit_block_move_via_libcall (x, y, size);
1359 else
1360 emit_block_move_via_loop (x, y, size, align);
1362 if (method == BLOCK_OP_CALL_PARM)
1363 OK_DEFER_POP;
1365 return retval;
1368 /* A subroutine of emit_block_move. Returns true if calling the
1369 block move libcall will not clobber any parameters which may have
1370 already been placed on the stack. */
1372 static bool
1373 block_move_libcall_safe_for_call_parm (void)
1375 /* If arguments are pushed on the stack, then they're safe. */
1376 if (PUSH_ARGS)
1377 return true;
1379 /* If registers go on the stack anyway, any argument is sure to clobber
1380 an outgoing argument. */
1381 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1383 tree fn = emit_block_move_libcall_fn (false);
1384 (void) fn;
1385 if (REG_PARM_STACK_SPACE (fn) != 0)
1386 return false;
1388 #endif
1390 /* If any argument goes in memory, then it might clobber an outgoing
1391 argument. */
1393 CUMULATIVE_ARGS args_so_far;
1394 tree fn, arg;
1396 fn = emit_block_move_libcall_fn (false);
1397 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1399 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1400 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1402 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1403 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1404 if (!tmp || !REG_P (tmp))
1405 return false;
1406 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1407 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1408 NULL_TREE, 1))
1409 return false;
1410 #endif
1411 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1414 return true;
1417 /* A subroutine of emit_block_move. Expand a movstr pattern;
1418 return true if successful. */
1420 static bool
1421 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1423 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1424 int save_volatile_ok = volatile_ok;
1425 enum machine_mode mode;
1427 /* Since this is a move insn, we don't care about volatility. */
1428 volatile_ok = 1;
1430 /* Try the most limited insn first, because there's no point
1431 including more than one in the machine description unless
1432 the more limited one has some advantage. */
1434 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1435 mode = GET_MODE_WIDER_MODE (mode))
1437 enum insn_code code = movstr_optab[(int) mode];
1438 insn_operand_predicate_fn pred;
1440 if (code != CODE_FOR_nothing
1441 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1442 here because if SIZE is less than the mode mask, as it is
1443 returned by the macro, it will definitely be less than the
1444 actual mode mask. */
1445 && ((GET_CODE (size) == CONST_INT
1446 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1447 <= (GET_MODE_MASK (mode) >> 1)))
1448 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1449 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1450 || (*pred) (x, BLKmode))
1451 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1452 || (*pred) (y, BLKmode))
1453 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1454 || (*pred) (opalign, VOIDmode)))
1456 rtx op2;
1457 rtx last = get_last_insn ();
1458 rtx pat;
1460 op2 = convert_to_mode (mode, size, 1);
1461 pred = insn_data[(int) code].operand[2].predicate;
1462 if (pred != 0 && ! (*pred) (op2, mode))
1463 op2 = copy_to_mode_reg (mode, op2);
1465 /* ??? When called via emit_block_move_for_call, it'd be
1466 nice if there were some way to inform the backend, so
1467 that it doesn't fail the expansion because it thinks
1468 emitting the libcall would be more efficient. */
1470 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1471 if (pat)
1473 emit_insn (pat);
1474 volatile_ok = save_volatile_ok;
1475 return true;
1477 else
1478 delete_insns_since (last);
1482 volatile_ok = save_volatile_ok;
1483 return false;
1486 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1487 Return the return value from memcpy, 0 otherwise. */
1489 static rtx
1490 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1492 rtx dst_addr, src_addr;
1493 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1494 enum machine_mode size_mode;
1495 rtx retval;
1497 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1499 It is unsafe to save the value generated by protect_from_queue and reuse
1500 it later. Consider what happens if emit_queue is called before the
1501 return value from protect_from_queue is used.
1503 Expansion of the CALL_EXPR below will call emit_queue before we are
1504 finished emitting RTL for argument setup. So if we are not careful we
1505 could get the wrong value for an argument.
1507 To avoid this problem we go ahead and emit code to copy the addresses of
1508 DST and SRC and SIZE into new pseudos. We can then place those new
1509 pseudos into an RTL_EXPR and use them later, even after a call to
1510 emit_queue.
1512 Note this is not strictly needed for library calls since they do not call
1513 emit_queue before loading their arguments. However, we may need to have
1514 library calls call emit_queue in the future since failing to do so could
1515 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1516 arguments in registers. */
1518 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1519 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1521 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1522 src_addr = convert_memory_address (ptr_mode, src_addr);
1524 dst_tree = make_tree (ptr_type_node, dst_addr);
1525 src_tree = make_tree (ptr_type_node, src_addr);
1527 if (TARGET_MEM_FUNCTIONS)
1528 size_mode = TYPE_MODE (sizetype);
1529 else
1530 size_mode = TYPE_MODE (unsigned_type_node);
1532 size = convert_to_mode (size_mode, size, 1);
1533 size = copy_to_mode_reg (size_mode, size);
1535 /* It is incorrect to use the libcall calling conventions to call
1536 memcpy in this context. This could be a user call to memcpy and
1537 the user may wish to examine the return value from memcpy. For
1538 targets where libcalls and normal calls have different conventions
1539 for returning pointers, we could end up generating incorrect code.
1541 For convenience, we generate the call to bcopy this way as well. */
1543 if (TARGET_MEM_FUNCTIONS)
1544 size_tree = make_tree (sizetype, size);
1545 else
1546 size_tree = make_tree (unsigned_type_node, size);
1548 fn = emit_block_move_libcall_fn (true);
1549 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1550 if (TARGET_MEM_FUNCTIONS)
1552 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1553 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1555 else
1557 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1558 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1561 /* Now we have to build up the CALL_EXPR itself. */
1562 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1563 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1564 call_expr, arg_list, NULL_TREE);
1566 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1568 /* If we are initializing a readonly value, show the above call clobbered
1569 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1570 the delay slot scheduler might overlook conflicts and make nasty
1571 decisions. */
1572 if (RTX_UNCHANGING_P (dst))
1573 add_function_usage_to
1574 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1575 gen_rtx_CLOBBER (VOIDmode, dst),
1576 NULL_RTX));
1578 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1581 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1582 for the function we use for block copies. The first time FOR_CALL
1583 is true, we call assemble_external. */
1585 static GTY(()) tree block_move_fn;
1587 void
1588 init_block_move_fn (const char *asmspec)
1590 if (!block_move_fn)
1592 tree args, fn;
1594 if (TARGET_MEM_FUNCTIONS)
1596 fn = get_identifier ("memcpy");
1597 args = build_function_type_list (ptr_type_node, ptr_type_node,
1598 const_ptr_type_node, sizetype,
1599 NULL_TREE);
1601 else
1603 fn = get_identifier ("bcopy");
1604 args = build_function_type_list (void_type_node, const_ptr_type_node,
1605 ptr_type_node, unsigned_type_node,
1606 NULL_TREE);
1609 fn = build_decl (FUNCTION_DECL, fn, args);
1610 DECL_EXTERNAL (fn) = 1;
1611 TREE_PUBLIC (fn) = 1;
1612 DECL_ARTIFICIAL (fn) = 1;
1613 TREE_NOTHROW (fn) = 1;
1615 block_move_fn = fn;
1618 if (asmspec)
1620 SET_DECL_RTL (block_move_fn, NULL_RTX);
1621 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1625 static tree
1626 emit_block_move_libcall_fn (int for_call)
1628 static bool emitted_extern;
1630 if (!block_move_fn)
1631 init_block_move_fn (NULL);
1633 if (for_call && !emitted_extern)
1635 emitted_extern = true;
1636 make_decl_rtl (block_move_fn, NULL);
1637 assemble_external (block_move_fn);
1640 return block_move_fn;
1643 /* A subroutine of emit_block_move. Copy the data via an explicit
1644 loop. This is used only when libcalls are forbidden. */
1645 /* ??? It'd be nice to copy in hunks larger than QImode. */
1647 static void
1648 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1649 unsigned int align ATTRIBUTE_UNUSED)
1651 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1652 enum machine_mode iter_mode;
1654 iter_mode = GET_MODE (size);
1655 if (iter_mode == VOIDmode)
1656 iter_mode = word_mode;
1658 top_label = gen_label_rtx ();
1659 cmp_label = gen_label_rtx ();
1660 iter = gen_reg_rtx (iter_mode);
1662 emit_move_insn (iter, const0_rtx);
1664 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1665 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1666 do_pending_stack_adjust ();
1668 emit_note (NOTE_INSN_LOOP_BEG);
1670 emit_jump (cmp_label);
1671 emit_label (top_label);
1673 tmp = convert_modes (Pmode, iter_mode, iter, true);
1674 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1675 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1676 x = change_address (x, QImode, x_addr);
1677 y = change_address (y, QImode, y_addr);
1679 emit_move_insn (x, y);
1681 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1682 true, OPTAB_LIB_WIDEN);
1683 if (tmp != iter)
1684 emit_move_insn (iter, tmp);
1686 emit_note (NOTE_INSN_LOOP_CONT);
1687 emit_label (cmp_label);
1689 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1690 true, top_label);
1692 emit_note (NOTE_INSN_LOOP_END);
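/* The RTL emitted above corresponds roughly to this C sketch (illustrative
   only):

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];
       iter++;
     cmp:
       if (iter < size)
         goto top;  */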
1695 /* Copy all or part of a value X into registers starting at REGNO.
1696 The number of registers to be filled is NREGS. */
1698 void
1699 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1701 int i;
1702 #ifdef HAVE_load_multiple
1703 rtx pat;
1704 rtx last;
1705 #endif
1707 if (nregs == 0)
1708 return;
1710 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1711 x = validize_mem (force_const_mem (mode, x));
1713 /* See if the machine can do this with a load multiple insn. */
1714 #ifdef HAVE_load_multiple
1715 if (HAVE_load_multiple)
1717 last = get_last_insn ();
1718 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1719 GEN_INT (nregs));
1720 if (pat)
1722 emit_insn (pat);
1723 return;
1725 else
1726 delete_insns_since (last);
1728 #endif
1730 for (i = 0; i < nregs; i++)
1731 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1732 operand_subword_force (x, i, mode));
1735 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1736 The number of registers to be filled is NREGS. */
1738 void
1739 move_block_from_reg (int regno, rtx x, int nregs)
1741 int i;
1743 if (nregs == 0)
1744 return;
1746 /* See if the machine can do this with a store multiple insn. */
1747 #ifdef HAVE_store_multiple
1748 if (HAVE_store_multiple)
1750 rtx last = get_last_insn ();
1751 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1752 GEN_INT (nregs));
1753 if (pat)
1755 emit_insn (pat);
1756 return;
1758 else
1759 delete_insns_since (last);
1761 #endif
1763 for (i = 0; i < nregs; i++)
1765 rtx tem = operand_subword (x, i, 1, BLKmode);
1767 if (tem == 0)
1768 abort ();
1770 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1774 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1775 ORIG, where ORIG is a non-consecutive group of registers represented by
1776 a PARALLEL. The clone is identical to the original except in that the
1777 original set of registers is replaced by a new set of pseudo registers.
1778 The new set has the same modes as the original set. */
1780 rtx
1781 gen_group_rtx (rtx orig)
1783 int i, length;
1784 rtx *tmps;
1786 if (GET_CODE (orig) != PARALLEL)
1787 abort ();
1789 length = XVECLEN (orig, 0);
1790 tmps = alloca (sizeof (rtx) * length);
1792 /* Skip a NULL entry in first slot. */
1793 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1795 if (i)
1796 tmps[0] = 0;
1798 for (; i < length; i++)
1800 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1801 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1803 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1806 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
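/* The PARALLEL groups handled here and by emit_group_load/emit_group_store
   look like, e.g. (register numbers purely illustrative):
     (parallel [(expr_list (reg:SI 101) (const_int 0))
                (expr_list (reg:SI 102) (const_int 4))])
   i.e. a list of (register, byte offset) pairs, optionally with a NULL
   first entry when part of the value also lives on the stack.  */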
1809 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1810 where DST is non-consecutive registers represented by a PARALLEL.
1811 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1812 if not known. */
1814 void
1815 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1817 rtx *tmps, src;
1818 int start, i;
1820 if (GET_CODE (dst) != PARALLEL)
1821 abort ();
1823 /* Check for a NULL entry, used to indicate that the parameter goes
1824 both on the stack and in registers. */
1825 if (XEXP (XVECEXP (dst, 0, 0), 0))
1826 start = 0;
1827 else
1828 start = 1;
1830 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1832 /* Process the pieces. */
1833 for (i = start; i < XVECLEN (dst, 0); i++)
1835 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1836 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1837 unsigned int bytelen = GET_MODE_SIZE (mode);
1838 int shift = 0;
1840 /* Handle trailing fragments that run over the size of the struct. */
1841 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1843 /* Arrange to shift the fragment to where it belongs.
1844 extract_bit_field loads to the lsb of the reg. */
1845 if (
1846 #ifdef BLOCK_REG_PADDING
1847 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1848 == (BYTES_BIG_ENDIAN ? upward : downward)
1849 #else
1850 BYTES_BIG_ENDIAN
1851 #endif
1853 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1854 bytelen = ssize - bytepos;
1855 if (bytelen <= 0)
1856 abort ();
1859 /* If we won't be loading directly from memory, protect the real source
1860 from strange tricks we might play; but make sure that the source can
1861 be loaded directly into the destination. */
1862 src = orig_src;
1863 if (GET_CODE (orig_src) != MEM
1864 && (!CONSTANT_P (orig_src)
1865 || (GET_MODE (orig_src) != mode
1866 && GET_MODE (orig_src) != VOIDmode)))
1868 if (GET_MODE (orig_src) == VOIDmode)
1869 src = gen_reg_rtx (mode);
1870 else
1871 src = gen_reg_rtx (GET_MODE (orig_src));
1873 emit_move_insn (src, orig_src);
1876 /* Optimize the access just a bit. */
1877 if (GET_CODE (src) == MEM
1878 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1879 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1880 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1881 && bytelen == GET_MODE_SIZE (mode))
1883 tmps[i] = gen_reg_rtx (mode);
1884 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1886 else if (GET_CODE (src) == CONCAT)
1888 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1889 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1891 if ((bytepos == 0 && bytelen == slen0)
1892 || (bytepos != 0 && bytepos + bytelen <= slen))
1894 /* The following assumes that the concatenated objects all
1895 have the same size. In this case, a simple calculation
1896 can be used to determine the object and the bit field
1897 to be extracted. */
1898 tmps[i] = XEXP (src, bytepos / slen0);
1899 if (! CONSTANT_P (tmps[i])
1900 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1901 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1902 (bytepos % slen0) * BITS_PER_UNIT,
1903 1, NULL_RTX, mode, mode, ssize);
1905 else if (bytepos == 0)
1907 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1908 emit_move_insn (mem, src);
1909 tmps[i] = adjust_address (mem, mode, 0);
1911 else
1912 abort ();
1914 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1915 SIMD register, which is currently broken. Until we get GCC
1916 to emit proper RTL for these cases, let's dump to memory. */
1917 else if (VECTOR_MODE_P (GET_MODE (dst))
1918 && GET_CODE (src) == REG)
1920 int slen = GET_MODE_SIZE (GET_MODE (src));
1921 rtx mem;
1923 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1924 emit_move_insn (mem, src);
1925 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1927 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1928 && XVECLEN (dst, 0) > 1)
1929 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1930 else if (CONSTANT_P (src)
1931 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1932 tmps[i] = src;
1933 else
1934 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1935 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1936 mode, mode, ssize);
1938 if (shift)
1939 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1940 tmps[i], 0, OPTAB_WIDEN);
1943 emit_queue ();
1945 /* Copy the extracted pieces into the proper (probable) hard regs. */
1946 for (i = start; i < XVECLEN (dst, 0); i++)
1947 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1950 /* Emit code to move a block SRC to block DST, where SRC and DST are
1951 non-consecutive groups of registers, each represented by a PARALLEL. */
1953 void
1954 emit_group_move (rtx dst, rtx src)
1956 int i;
1958 if (GET_CODE (src) != PARALLEL
1959 || GET_CODE (dst) != PARALLEL
1960 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1961 abort ();
1963 /* Skip first entry if NULL. */
1964 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1965 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1966 XEXP (XVECEXP (src, 0, i), 0));
1969 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1970 where SRC is non-consecutive registers represented by a PARALLEL.
1971 SSIZE represents the total size of block ORIG_DST, or -1 if not
1972 known. */
1974 void
1975 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1977 rtx *tmps, dst;
1978 int start, i;
1980 if (GET_CODE (src) != PARALLEL)
1981 abort ();
1983 /* Check for a NULL entry, used to indicate that the parameter goes
1984 both on the stack and in registers. */
1985 if (XEXP (XVECEXP (src, 0, 0), 0))
1986 start = 0;
1987 else
1988 start = 1;
1990 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1992 /* Copy the (probable) hard regs into pseudos. */
1993 for (i = start; i < XVECLEN (src, 0); i++)
1995 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1996 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1997 emit_move_insn (tmps[i], reg);
1999 emit_queue ();
2001 /* If we won't be storing directly into memory, protect the real destination
2002 from strange tricks we might play. */
2003 dst = orig_dst;
2004 if (GET_CODE (dst) == PARALLEL)
2006 rtx temp;
2008 /* We can get a PARALLEL dst if there is a conditional expression in
2009 a return statement. In that case, the dst and src are the same,
2010 so no action is necessary. */
2011 if (rtx_equal_p (dst, src))
2012 return;
2014 /* It is unclear if we can ever reach here, but we may as well handle
2015 it. Allocate a temporary, and split this into a store/load to/from
2016 the temporary. */
2018 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2019 emit_group_store (temp, src, type, ssize);
2020 emit_group_load (dst, temp, type, ssize);
2021 return;
2023 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2025 dst = gen_reg_rtx (GET_MODE (orig_dst));
2026 /* Make life a bit easier for combine. */
2027 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2030 /* Process the pieces. */
2031 for (i = start; i < XVECLEN (src, 0); i++)
2033 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2034 enum machine_mode mode = GET_MODE (tmps[i]);
2035 unsigned int bytelen = GET_MODE_SIZE (mode);
2036 rtx dest = dst;
2038 /* Handle trailing fragments that run over the size of the struct. */
2039 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2041 /* store_bit_field always takes its value from the lsb.
2042 Move the fragment to the lsb if it's not already there. */
2043 if (
2044 #ifdef BLOCK_REG_PADDING
2045 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2046 == (BYTES_BIG_ENDIAN ? upward : downward)
2047 #else
2048 BYTES_BIG_ENDIAN
2049 #endif
2052 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2053 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2054 tmps[i], 0, OPTAB_WIDEN);
2056 bytelen = ssize - bytepos;
2059 if (GET_CODE (dst) == CONCAT)
2061 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2062 dest = XEXP (dst, 0);
2063 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2065 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2066 dest = XEXP (dst, 1);
2068 else if (bytepos == 0 && XVECLEN (src, 0))
2070 dest = assign_stack_temp (GET_MODE (dest),
2071 GET_MODE_SIZE (GET_MODE (dest)), 0);
2072 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2073 tmps[i]);
2074 dst = dest;
2075 break;
2077 else
2078 abort ();
2081 /* Optimize the access just a bit. */
2082 if (GET_CODE (dest) == MEM
2083 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2084 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2085 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2086 && bytelen == GET_MODE_SIZE (mode))
2087 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2088 else
2089 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2090 mode, tmps[i], ssize);
2093 emit_queue ();
2095 /* Copy from the pseudo into the (probable) hard reg. */
2096 if (orig_dst != dst)
2097 emit_move_insn (orig_dst, dst);
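/* Illustrative sketch, not part of the original file: the usual
   pairing of the two routines above.  GROUP is a PARALLEL as shown
   earlier, MEM is a BLKmode memory reference, and SIZE the value's
   size in bytes.  */
#if 0
static void
example_group_round_trip (rtx group, rtx mem, tree type, int size)
{
  /* Spread the memory contents across the group's registers ...  */
  emit_group_load (group, mem, type, size);
  /* ... and write the register group back out to memory.  */
  emit_group_store (mem, group, type, size);
}
#endif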
2100 /* Generate code to copy a BLKmode object of TYPE out of a
2101 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2102 is null, a stack temporary is created. TGTBLK is returned.
2104 The purpose of this routine is to handle functions that return
2105 BLKmode structures in registers. Some machines (the PA for example)
2106 want to return all small structures in registers regardless of the
2107 structure's alignment. */
2110 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2112 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2113 rtx src = NULL, dst = NULL;
2114 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2115 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2117 if (tgtblk == 0)
2119 tgtblk = assign_temp (build_qualified_type (type,
2120 (TYPE_QUALS (type)
2121 | TYPE_QUAL_CONST)),
2122 0, 1, 1);
2123 preserve_temp_slots (tgtblk);
2126 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2127 into a new pseudo which is a full word. */
2129 if (GET_MODE (srcreg) != BLKmode
2130 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2131 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2133 /* If the structure doesn't take up a whole number of words, see whether
2134 SRCREG is padded on the left or on the right. If it's on the left,
2135 set PADDING_CORRECTION to the number of bits to skip.
2137 In most ABIs, the structure will be returned at the least significant end of
2138 the register, which translates to right padding on little-endian
2139 targets and left padding on big-endian targets. The opposite
2140 holds if the structure is returned at the most significant
2141 end of the register. */
2142 if (bytes % UNITS_PER_WORD != 0
2143 && (targetm.calls.return_in_msb (type)
2144 ? !BYTES_BIG_ENDIAN
2145 : BYTES_BIG_ENDIAN))
2146 padding_correction
2147 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2149 /* Copy the structure BITSIZE bits at a time.
2151 We could probably emit more efficient code for machines which do not use
2152 strict alignment, but it doesn't seem worth the effort at the current
2153 time. */
2154 for (bitpos = 0, xbitpos = padding_correction;
2155 bitpos < bytes * BITS_PER_UNIT;
2156 bitpos += bitsize, xbitpos += bitsize)
2158 /* We need a new source operand each time xbitpos is on a
2159 word boundary or when xbitpos == padding_correction
2160 (the first time through). */
2161 if (xbitpos % BITS_PER_WORD == 0
2162 || xbitpos == padding_correction)
2163 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2164 GET_MODE (srcreg));
2166 /* We need a new destination operand each time bitpos is on
2167 a word boundary. */
2168 if (bitpos % BITS_PER_WORD == 0)
2169 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2171 /* Use xbitpos for the source extraction (right justified) and
2172 bitpos for the destination store (left justified). */
2173 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2174 extract_bit_field (src, bitsize,
2175 xbitpos % BITS_PER_WORD, 1,
2176 NULL_RTX, word_mode, word_mode,
2177 BITS_PER_WORD),
2178 BITS_PER_WORD);
2181 return tgtblk;
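/* Worked example for the padding computation above (illustrative
   figures): with 32-bit words, a 6-byte structure returned in the
   least significant end of big-endian registers gives
   bytes % UNITS_PER_WORD == 2, so
   padding_correction = 32 - 2 * 8 = 16, and the extraction loop
   skips those 16 bits of left padding in the source.  */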
2184 /* Add a USE expression for REG to the (possibly empty) list pointed
2185 to by CALL_FUSAGE. REG must denote a hard register. */
2187 void
2188 use_reg (rtx *call_fusage, rtx reg)
2190 if (GET_CODE (reg) != REG
2191 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2192 abort ();
2194 *call_fusage
2195 = gen_rtx_EXPR_LIST (VOIDmode,
2196 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2199 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2200 starting at REGNO. All of these registers must be hard registers. */
2202 void
2203 use_regs (rtx *call_fusage, int regno, int nregs)
2205 int i;
2207 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2208 abort ();
2210 for (i = 0; i < nregs; i++)
2211 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2214 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2215 PARALLEL REGS. This is for calls that pass values in multiple
2216 non-contiguous locations. The Irix 6 ABI has examples of this. */
2218 void
2219 use_group_regs (rtx *call_fusage, rtx regs)
2221 int i;
2223 for (i = 0; i < XVECLEN (regs, 0); i++)
2225 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2227 /* A NULL entry means the parameter goes both on the stack and in
2228 registers. This can also be a MEM for targets that pass values
2229 partially on the stack and partially in registers. */
2230 if (reg != 0 && GET_CODE (reg) == REG)
2231 use_reg (call_fusage, reg);
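/* Illustrative sketch, not part of the original file: how a caller
   builds up a CALL_FUSAGE chain.  The register numbers and the
   PARALLEL argument are hypothetical.  */
#if 0
static rtx
example_call_fusage (rtx group)
{
  rtx call_fusage = NULL_RTX;

  use_regs (&call_fusage, 4, 2);        /* arguments in hard regs 4 and 5 */
  use_group_regs (&call_fusage, group); /* a PARALLEL of registers */
  return call_fusage;
}
#endif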
2236 /* Determine whether the LEN bytes generated by CONSTFUN can be
2237 stored to memory using several move instructions. CONSTFUNDATA is
2238 a pointer which will be passed as argument in every CONSTFUN call.
2239 ALIGN is maximum alignment we can assume. Return nonzero if a
2240 call to store_by_pieces should succeed. */
2243 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2244 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2245 void *constfundata, unsigned int align)
2247 unsigned HOST_WIDE_INT max_size, l;
2248 HOST_WIDE_INT offset = 0;
2249 enum machine_mode mode, tmode;
2250 enum insn_code icode;
2251 int reverse;
2252 rtx cst;
2254 if (len == 0)
2255 return 1;
2257 if (! STORE_BY_PIECES_P (len, align))
2258 return 0;
2260 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2261 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2262 align = MOVE_MAX * BITS_PER_UNIT;
2264 /* We would first store what we can in the largest integer mode, then go to
2265 successively smaller modes. */
2267 for (reverse = 0;
2268 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2269 reverse++)
2271 l = len;
2272 mode = VOIDmode;
2273 max_size = STORE_MAX_PIECES + 1;
2274 while (max_size > 1)
2276 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2277 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2278 if (GET_MODE_SIZE (tmode) < max_size)
2279 mode = tmode;
2281 if (mode == VOIDmode)
2282 break;
2284 icode = mov_optab->handlers[(int) mode].insn_code;
2285 if (icode != CODE_FOR_nothing
2286 && align >= GET_MODE_ALIGNMENT (mode))
2288 unsigned int size = GET_MODE_SIZE (mode);
2290 while (l >= size)
2292 if (reverse)
2293 offset -= size;
2295 cst = (*constfun) (constfundata, offset, mode);
2296 if (!LEGITIMATE_CONSTANT_P (cst))
2297 return 0;
2299 if (!reverse)
2300 offset += size;
2302 l -= size;
2306 max_size = GET_MODE_SIZE (mode);
2309 /* The code above should have handled everything. */
2310 if (l != 0)
2311 abort ();
2314 return 1;
2317 /* Generate several move instructions to store LEN bytes generated by
2318 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2319 pointer which will be passed as argument in every CONSTFUN call.
2320 ALIGN is maximum alignment we can assume.
2321 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2322 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2323 stpcpy. */
2326 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2327 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2328 void *constfundata, unsigned int align, int endp)
2330 struct store_by_pieces data;
2332 if (len == 0)
2334 if (endp == 2)
2335 abort ();
2336 return to;
2339 if (! STORE_BY_PIECES_P (len, align))
2340 abort ();
2341 to = protect_from_queue (to, 1);
2342 data.constfun = constfun;
2343 data.constfundata = constfundata;
2344 data.len = len;
2345 data.to = to;
2346 store_by_pieces_1 (&data, align);
2347 if (endp)
2349 rtx to1;
2351 if (data.reverse)
2352 abort ();
2353 if (data.autinc_to)
2355 if (endp == 2)
2357 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2358 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2359 else
2360 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2361 -1));
2363 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2364 data.offset);
2366 else
2368 if (endp == 2)
2369 --data.offset;
2370 to1 = adjust_address (data.to, QImode, data.offset);
2372 return to1;
2374 else
2375 return data.to;
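/* Illustrative sketch, not part of the original file: a CONSTFUN in
   the style of the memset expander, replicating one byte across MODE,
   and the guarded call pattern expected by the routines above.
   Assumes MODE is no wider than a HOST_WIDE_INT.  */
#if 0
static rtx
example_byte_fill (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode)
{
  unsigned HOST_WIDE_INT c = *(unsigned char *) data;
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << BITS_PER_UNIT) | c;
  return gen_int_mode (val, mode);
}

static void
example_fill (rtx to, unsigned char c, unsigned HOST_WIDE_INT len,
              unsigned int align)
{
  if (can_store_by_pieces (len, example_byte_fill, &c, align))
    store_by_pieces (to, len, example_byte_fill, &c, align, 0);
}
#endif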
2378 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2379 rtx with BLKmode). The caller must pass TO through protect_from_queue
2380 before calling. ALIGN is maximum alignment we can assume. */
2382 static void
2383 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2385 struct store_by_pieces data;
2387 if (len == 0)
2388 return;
2390 data.constfun = clear_by_pieces_1;
2391 data.constfundata = NULL;
2392 data.len = len;
2393 data.to = to;
2394 store_by_pieces_1 (&data, align);
2397 /* Callback routine for clear_by_pieces.
2398 Return const0_rtx unconditionally. */
2400 static rtx
2401 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2402 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2403 enum machine_mode mode ATTRIBUTE_UNUSED)
2405 return const0_rtx;
2408 /* Subroutine of clear_by_pieces and store_by_pieces.
2409 Generate several move instructions to store LEN bytes of block TO. (A MEM
2410 rtx with BLKmode). The caller must pass TO through protect_from_queue
2411 before calling. ALIGN is maximum alignment we can assume. */
2413 static void
2414 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2415 unsigned int align ATTRIBUTE_UNUSED)
2417 rtx to_addr = XEXP (data->to, 0);
2418 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2419 enum machine_mode mode = VOIDmode, tmode;
2420 enum insn_code icode;
2422 data->offset = 0;
2423 data->to_addr = to_addr;
2424 data->autinc_to
2425 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2426 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2428 data->explicit_inc_to = 0;
2429 data->reverse
2430 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2431 if (data->reverse)
2432 data->offset = data->len;
2434 /* If storing requires more than two move insns,
2435 copy addresses to registers (to make displacements shorter)
2436 and use post-increment if available. */
2437 if (!data->autinc_to
2438 && move_by_pieces_ninsns (data->len, align) > 2)
2440 /* Determine the main mode we'll be using. */
2441 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2442 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2443 if (GET_MODE_SIZE (tmode) < max_size)
2444 mode = tmode;
2446 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2448 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2449 data->autinc_to = 1;
2450 data->explicit_inc_to = -1;
2453 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2454 && ! data->autinc_to)
2456 data->to_addr = copy_addr_to_reg (to_addr);
2457 data->autinc_to = 1;
2458 data->explicit_inc_to = 1;
2461 if (!data->autinc_to && CONSTANT_P (to_addr))
2462 data->to_addr = copy_addr_to_reg (to_addr);
2465 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2466 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2467 align = MOVE_MAX * BITS_PER_UNIT;
2469 /* First store what we can in the largest integer mode, then go to
2470 successively smaller modes. */
2472 while (max_size > 1)
2474 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2475 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2476 if (GET_MODE_SIZE (tmode) < max_size)
2477 mode = tmode;
2479 if (mode == VOIDmode)
2480 break;
2482 icode = mov_optab->handlers[(int) mode].insn_code;
2483 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2484 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2486 max_size = GET_MODE_SIZE (mode);
2489 /* The code above should have handled everything. */
2490 if (data->len != 0)
2491 abort ();
2494 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2495 with move instructions for mode MODE. GENFUN is the gen_... function
2496 to make a move insn for that mode. DATA has all the other info. */
2498 static void
2499 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2500 struct store_by_pieces *data)
2502 unsigned int size = GET_MODE_SIZE (mode);
2503 rtx to1, cst;
2505 while (data->len >= size)
2507 if (data->reverse)
2508 data->offset -= size;
2510 if (data->autinc_to)
2511 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2512 data->offset);
2513 else
2514 to1 = adjust_address (data->to, mode, data->offset);
2516 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2517 emit_insn (gen_add2_insn (data->to_addr,
2518 GEN_INT (-(HOST_WIDE_INT) size)));
2520 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2521 emit_insn ((*genfun) (to1, cst));
2523 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2524 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2526 if (! data->reverse)
2527 data->offset += size;
2529 data->len -= size;
2533 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2534 its length in bytes. */
2537 clear_storage (rtx object, rtx size)
2539 rtx retval = 0;
2540 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2541 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2543 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2544 just move a zero. Otherwise, do this a piece at a time. */
2545 if (GET_MODE (object) != BLKmode
2546 && GET_CODE (size) == CONST_INT
2547 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2548 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2549 else
2551 object = protect_from_queue (object, 1);
2552 size = protect_from_queue (size, 0);
2554 if (size == const0_rtx)
2556 else if (GET_CODE (size) == CONST_INT
2557 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2558 clear_by_pieces (object, INTVAL (size), align);
2559 else if (clear_storage_via_clrstr (object, size, align))
2561 else
2562 retval = clear_storage_via_libcall (object, size);
2565 return retval;
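/* Illustrative sketch, not part of the original file: zeroing a fresh
   BLKmode stack temporary.  clear_storage itself chooses between
   clear_by_pieces, a clrstr pattern and the memset/bzero libcall.  */
#if 0
static rtx
example_clear_temp (HOST_WIDE_INT nbytes)
{
  rtx mem = assign_stack_temp (BLKmode, nbytes, 0);

  clear_storage (mem, GEN_INT (nbytes));
  return mem;
}
#endif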
2568 /* A subroutine of clear_storage. Expand a clrstr pattern;
2569 return true if successful. */
2571 static bool
2572 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2574 /* Try the most limited insn first, because there's no point
2575 including more than one in the machine description unless
2576 the more limited one has some advantage. */
2578 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2579 enum machine_mode mode;
2581 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2582 mode = GET_MODE_WIDER_MODE (mode))
2584 enum insn_code code = clrstr_optab[(int) mode];
2585 insn_operand_predicate_fn pred;
2587 if (code != CODE_FOR_nothing
2588 /* We don't need MODE to be narrower than
2589 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2590 the mode mask, as it is returned by the macro, it will
2591 definitely be less than the actual mode mask. */
2592 && ((GET_CODE (size) == CONST_INT
2593 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2594 <= (GET_MODE_MASK (mode) >> 1)))
2595 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2596 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2597 || (*pred) (object, BLKmode))
2598 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2599 || (*pred) (opalign, VOIDmode)))
2601 rtx op1;
2602 rtx last = get_last_insn ();
2603 rtx pat;
2605 op1 = convert_to_mode (mode, size, 1);
2606 pred = insn_data[(int) code].operand[1].predicate;
2607 if (pred != 0 && ! (*pred) (op1, mode))
2608 op1 = copy_to_mode_reg (mode, op1);
2610 pat = GEN_FCN ((int) code) (object, op1, opalign);
2611 if (pat)
2613 emit_insn (pat);
2614 return true;
2616 else
2617 delete_insns_since (last);
2621 return false;
2624 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2625 Return the return value of memset, 0 otherwise. */
2627 static rtx
2628 clear_storage_via_libcall (rtx object, rtx size)
2630 tree call_expr, arg_list, fn, object_tree, size_tree;
2631 enum machine_mode size_mode;
2632 rtx retval;
2634 /* OBJECT or SIZE may have been passed through protect_from_queue.
2636 It is unsafe to save the value generated by protect_from_queue
2637 and reuse it later. Consider what happens if emit_queue is
2638 called before the return value from protect_from_queue is used.
2640 Expansion of the CALL_EXPR below will call emit_queue before
2641 we are finished emitting RTL for argument setup. So if we are
2642 not careful we could get the wrong value for an argument.
2644 To avoid this problem we go ahead and emit code to copy OBJECT
2645 and SIZE into new pseudos. We can then place those new pseudos
2646 into an RTL_EXPR and use them later, even after a call to
2647 emit_queue.
2649 Note this is not strictly needed for library calls since they
2650 do not call emit_queue before loading their arguments. However,
2651 we may need to have library calls call emit_queue in the future
2652 since failing to do so could cause problems for targets which
2653 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2655 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2657 if (TARGET_MEM_FUNCTIONS)
2658 size_mode = TYPE_MODE (sizetype);
2659 else
2660 size_mode = TYPE_MODE (unsigned_type_node);
2661 size = convert_to_mode (size_mode, size, 1);
2662 size = copy_to_mode_reg (size_mode, size);
2664 /* It is incorrect to use the libcall calling conventions to call
2665 memset in this context. This could be a user call to memset and
2666 the user may wish to examine the return value from memset. For
2667 targets where libcalls and normal calls have different conventions
2668 for returning pointers, we could end up generating incorrect code.
2670 For convenience, we generate the call to bzero this way as well. */
2672 object_tree = make_tree (ptr_type_node, object);
2673 if (TARGET_MEM_FUNCTIONS)
2674 size_tree = make_tree (sizetype, size);
2675 else
2676 size_tree = make_tree (unsigned_type_node, size);
2678 fn = clear_storage_libcall_fn (true);
2679 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2680 if (TARGET_MEM_FUNCTIONS)
2681 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2682 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2684 /* Now we have to build up the CALL_EXPR itself. */
2685 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2686 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2687 call_expr, arg_list, NULL_TREE);
2689 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2691 /* If we are initializing a readonly value, show the above call
2692 clobbered it. Otherwise, a load from it may erroneously be
2693 hoisted from a loop. */
2694 if (RTX_UNCHANGING_P (object))
2695 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2697 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
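/* For reference, the source-level shape of the call built above is
   roughly (illustrative only):

     memset (object, 0, size);   with TARGET_MEM_FUNCTIONS, whose
                                 return value is handed back, or
     bzero (object, size);       otherwise, in which case NULL_RTX is
                                 returned since bzero has no useful
                                 return value.  */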
2700 /* A subroutine of clear_storage_via_libcall. Create the tree node
2701 for the function we use for block clears. The first time FOR_CALL
2702 is true, we call assemble_external. */
2704 static GTY(()) tree block_clear_fn;
2706 void
2707 init_block_clear_fn (const char *asmspec)
2709 if (!block_clear_fn)
2711 tree fn, args;
2713 if (TARGET_MEM_FUNCTIONS)
2715 fn = get_identifier ("memset");
2716 args = build_function_type_list (ptr_type_node, ptr_type_node,
2717 integer_type_node, sizetype,
2718 NULL_TREE);
2720 else
2722 fn = get_identifier ("bzero");
2723 args = build_function_type_list (void_type_node, ptr_type_node,
2724 unsigned_type_node, NULL_TREE);
2727 fn = build_decl (FUNCTION_DECL, fn, args);
2728 DECL_EXTERNAL (fn) = 1;
2729 TREE_PUBLIC (fn) = 1;
2730 DECL_ARTIFICIAL (fn) = 1;
2731 TREE_NOTHROW (fn) = 1;
2733 block_clear_fn = fn;
2736 if (asmspec)
2738 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2739 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2743 static tree
2744 clear_storage_libcall_fn (int for_call)
2746 static bool emitted_extern;
2748 if (!block_clear_fn)
2749 init_block_clear_fn (NULL);
2751 if (for_call && !emitted_extern)
2753 emitted_extern = true;
2754 make_decl_rtl (block_clear_fn, NULL);
2755 assemble_external (block_clear_fn);
2758 return block_clear_fn;
2761 /* Generate code to copy Y into X.
2762 Both Y and X must have the same mode, except that
2763 Y can be a constant with VOIDmode.
2764 This mode cannot be BLKmode; use emit_block_move for that.
2766 Return the last instruction emitted. */
2769 emit_move_insn (rtx x, rtx y)
2771 enum machine_mode mode = GET_MODE (x);
2772 rtx y_cst = NULL_RTX;
2773 rtx last_insn, set;
2775 x = protect_from_queue (x, 1);
2776 y = protect_from_queue (y, 0);
2778 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2779 abort ();
2781 /* Never force constant_p_rtx to memory. */
2782 if (GET_CODE (y) == CONSTANT_P_RTX)
2784 else if (CONSTANT_P (y))
2786 if (optimize
2787 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2788 && (last_insn = compress_float_constant (x, y)))
2789 return last_insn;
2791 y_cst = y;
2793 if (!LEGITIMATE_CONSTANT_P (y))
2795 y = force_const_mem (mode, y);
2797 /* If the target's cannot_force_const_mem prevented the spill,
2798 assume that the target's move expanders will also take care
2799 of the non-legitimate constant. */
2800 if (!y)
2801 y = y_cst;
2805 /* If X or Y are memory references, verify that their addresses are valid
2806 for the machine. */
2807 if (GET_CODE (x) == MEM
2808 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2809 && ! push_operand (x, GET_MODE (x)))
2810 || (flag_force_addr
2811 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2812 x = validize_mem (x);
2814 if (GET_CODE (y) == MEM
2815 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2816 || (flag_force_addr
2817 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2818 y = validize_mem (y);
2820 if (mode == BLKmode)
2821 abort ();
2823 last_insn = emit_move_insn_1 (x, y);
2825 if (y_cst && GET_CODE (x) == REG
2826 && (set = single_set (last_insn)) != NULL_RTX
2827 && SET_DEST (set) == x
2828 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2829 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2831 return last_insn;
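/* Illustrative sketch, not part of the original file: the common case
   of loading a constant into a fresh pseudo.  Constants that fail
   LEGITIMATE_CONSTANT_P are spilled to the constant pool by the code
   above before the move is emitted.  */
#if 0
static rtx
example_load_constant (HOST_WIDE_INT value)
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, gen_int_mode (value, SImode));
  return reg;
}
#endif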
2834 /* Low level part of emit_move_insn.
2835 Called just like emit_move_insn, but assumes X and Y
2836 are basically valid. */
2839 emit_move_insn_1 (rtx x, rtx y)
2841 enum machine_mode mode = GET_MODE (x);
2842 enum machine_mode submode;
2843 enum mode_class class = GET_MODE_CLASS (mode);
2845 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2846 abort ();
2848 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2849 return
2850 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2852 /* Expand complex moves by moving real part and imag part, if possible. */
2853 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2854 && BLKmode != (submode = GET_MODE_INNER (mode))
2855 && (mov_optab->handlers[(int) submode].insn_code
2856 != CODE_FOR_nothing))
2858 /* Don't split destination if it is a stack push. */
2859 int stack = push_operand (x, GET_MODE (x));
2861 #ifdef PUSH_ROUNDING
2862 /* In case we output to the stack, but the size is smaller than the
2863 machine can push exactly, we need to use move instructions. */
2864 if (stack
2865 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2866 != GET_MODE_SIZE (submode)))
2868 rtx temp;
2869 HOST_WIDE_INT offset1, offset2;
2871 /* Do not use anti_adjust_stack, since we don't want to update
2872 stack_pointer_delta. */
2873 temp = expand_binop (Pmode,
2874 #ifdef STACK_GROWS_DOWNWARD
2875 sub_optab,
2876 #else
2877 add_optab,
2878 #endif
2879 stack_pointer_rtx,
2880 GEN_INT
2881 (PUSH_ROUNDING
2882 (GET_MODE_SIZE (GET_MODE (x)))),
2883 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2885 if (temp != stack_pointer_rtx)
2886 emit_move_insn (stack_pointer_rtx, temp);
2888 #ifdef STACK_GROWS_DOWNWARD
2889 offset1 = 0;
2890 offset2 = GET_MODE_SIZE (submode);
2891 #else
2892 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2893 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2894 + GET_MODE_SIZE (submode));
2895 #endif
2897 emit_move_insn (change_address (x, submode,
2898 gen_rtx_PLUS (Pmode,
2899 stack_pointer_rtx,
2900 GEN_INT (offset1))),
2901 gen_realpart (submode, y));
2902 emit_move_insn (change_address (x, submode,
2903 gen_rtx_PLUS (Pmode,
2904 stack_pointer_rtx,
2905 GEN_INT (offset2))),
2906 gen_imagpart (submode, y));
2908 else
2909 #endif
2910 /* If this is a stack push, push the highpart first, so it
2911 will be in the argument order.
2913 In that case, change_address is used only to convert
2914 the mode, not to change the address. */
2915 if (stack)
2917 /* Note that the real part always precedes the imag part in memory
2918 regardless of machine's endianness. */
2919 #ifdef STACK_GROWS_DOWNWARD
2920 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2921 gen_imagpart (submode, y));
2922 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2923 gen_realpart (submode, y));
2924 #else
2925 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2926 gen_realpart (submode, y));
2927 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2928 gen_imagpart (submode, y));
2929 #endif
2931 else
2933 rtx realpart_x, realpart_y;
2934 rtx imagpart_x, imagpart_y;
2936 /* If this is a complex value with each part being smaller than a
2937 word, the usual calling sequence will likely pack the pieces into
2938 a single register. Unfortunately, SUBREG of hard registers only
2939 deals in terms of words, so we have a problem converting input
2940 arguments to the CONCAT of two registers that is used elsewhere
2941 for complex values. If this is before reload, we can copy it into
2942 memory and reload. FIXME, we should see about using extract and
2943 insert on integer registers, but complex short and complex char
2944 variables should be rarely used. */
2945 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2946 && (reload_in_progress | reload_completed) == 0)
2948 int packed_dest_p
2949 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2950 int packed_src_p
2951 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2953 if (packed_dest_p || packed_src_p)
2955 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2956 ? MODE_FLOAT : MODE_INT);
2958 enum machine_mode reg_mode
2959 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2961 if (reg_mode != BLKmode)
2963 rtx mem = assign_stack_temp (reg_mode,
2964 GET_MODE_SIZE (mode), 0);
2965 rtx cmem = adjust_address (mem, mode, 0);
2967 cfun->cannot_inline
2968 = N_("function using short complex types cannot be inline");
2970 if (packed_dest_p)
2972 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2974 emit_move_insn_1 (cmem, y);
2975 return emit_move_insn_1 (sreg, mem);
2977 else
2979 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2981 emit_move_insn_1 (mem, sreg);
2982 return emit_move_insn_1 (x, cmem);
2988 realpart_x = gen_realpart (submode, x);
2989 realpart_y = gen_realpart (submode, y);
2990 imagpart_x = gen_imagpart (submode, x);
2991 imagpart_y = gen_imagpart (submode, y);
2993 /* Show the output dies here. This is necessary for SUBREGs
2994 of pseudos since we cannot track their lifetimes correctly;
2995 hard regs shouldn't appear here except as return values.
2996 We never want to emit such a clobber after reload. */
2997 if (x != y
2998 && ! (reload_in_progress || reload_completed)
2999 && (GET_CODE (realpart_x) == SUBREG
3000 || GET_CODE (imagpart_x) == SUBREG))
3001 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3003 emit_move_insn (realpart_x, realpart_y);
3004 emit_move_insn (imagpart_x, imagpart_y);
3007 return get_last_insn ();
3010 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3011 find a mode to do it in. If we have a movcc, use it. Otherwise,
3012 find the MODE_INT mode of the same width. */
3013 else if (GET_MODE_CLASS (mode) == MODE_CC
3014 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3016 enum insn_code insn_code;
3017 enum machine_mode tmode = VOIDmode;
3018 rtx x1 = x, y1 = y;
3020 if (mode != CCmode
3021 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3022 tmode = CCmode;
3023 else
3024 for (tmode = QImode; tmode != VOIDmode;
3025 tmode = GET_MODE_WIDER_MODE (tmode))
3026 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3027 break;
3029 if (tmode == VOIDmode)
3030 abort ();
3032 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3033 may call change_address which is not appropriate if we were
3034 called when a reload was in progress. We don't have to worry
3035 about changing the address since the size in bytes is supposed to
3036 be the same. Copy the MEM to change the mode and move any
3037 substitutions from the old MEM to the new one. */
3039 if (reload_in_progress)
3041 x = gen_lowpart_common (tmode, x1);
3042 if (x == 0 && GET_CODE (x1) == MEM)
3044 x = adjust_address_nv (x1, tmode, 0);
3045 copy_replacements (x1, x);
3048 y = gen_lowpart_common (tmode, y1);
3049 if (y == 0 && GET_CODE (y1) == MEM)
3051 y = adjust_address_nv (y1, tmode, 0);
3052 copy_replacements (y1, y);
3055 else
3057 x = gen_lowpart (tmode, x);
3058 y = gen_lowpart (tmode, y);
3061 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3062 return emit_insn (GEN_FCN (insn_code) (x, y));
3065 /* Try using a move pattern for the corresponding integer mode. This is
3066 only safe when simplify_subreg can convert MODE constants into integer
3067 constants. At present, it can only do this reliably if the value
3068 fits within a HOST_WIDE_INT. */
3069 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3070 && (submode = int_mode_for_mode (mode)) != BLKmode
3071 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3072 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3073 (simplify_gen_subreg (submode, x, mode, 0),
3074 simplify_gen_subreg (submode, y, mode, 0)));
3076 /* This will handle any multi-word or full-word mode that lacks a move_insn
3077 pattern. However, you will get better code if you define such patterns,
3078 even if they must turn into multiple assembler instructions. */
3079 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3081 rtx last_insn = 0;
3082 rtx seq, inner;
3083 int need_clobber;
3084 int i;
3086 #ifdef PUSH_ROUNDING
3088 /* If X is a push on the stack, do the push now and replace
3089 X with a reference to the stack pointer. */
3090 if (push_operand (x, GET_MODE (x)))
3092 rtx temp;
3093 enum rtx_code code;
3095 /* Do not use anti_adjust_stack, since we don't want to update
3096 stack_pointer_delta. */
3097 temp = expand_binop (Pmode,
3098 #ifdef STACK_GROWS_DOWNWARD
3099 sub_optab,
3100 #else
3101 add_optab,
3102 #endif
3103 stack_pointer_rtx,
3104 GEN_INT
3105 (PUSH_ROUNDING
3106 (GET_MODE_SIZE (GET_MODE (x)))),
3107 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3109 if (temp != stack_pointer_rtx)
3110 emit_move_insn (stack_pointer_rtx, temp);
3112 code = GET_CODE (XEXP (x, 0));
3114 /* Just hope that small offsets off SP are OK. */
3115 if (code == POST_INC)
3116 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3117 GEN_INT (-((HOST_WIDE_INT)
3118 GET_MODE_SIZE (GET_MODE (x)))));
3119 else if (code == POST_DEC)
3120 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3121 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3122 else
3123 temp = stack_pointer_rtx;
3125 x = change_address (x, VOIDmode, temp);
3127 #endif
3129 /* If we are in reload, see if either operand is a MEM whose address
3130 is scheduled for replacement. */
3131 if (reload_in_progress && GET_CODE (x) == MEM
3132 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3133 x = replace_equiv_address_nv (x, inner);
3134 if (reload_in_progress && GET_CODE (y) == MEM
3135 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3136 y = replace_equiv_address_nv (y, inner);
3138 start_sequence ();
3140 need_clobber = 0;
3141 for (i = 0;
3142 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3143 i++)
3145 rtx xpart = operand_subword (x, i, 1, mode);
3146 rtx ypart = operand_subword (y, i, 1, mode);
3148 /* If we can't get a part of Y, put Y into memory if it is a
3149 constant. Otherwise, force it into a register. If we still
3150 can't get a part of Y, abort. */
3151 if (ypart == 0 && CONSTANT_P (y))
3153 y = force_const_mem (mode, y);
3154 ypart = operand_subword (y, i, 1, mode);
3156 else if (ypart == 0)
3157 ypart = operand_subword_force (y, i, mode);
3159 if (xpart == 0 || ypart == 0)
3160 abort ();
3162 need_clobber |= (GET_CODE (xpart) == SUBREG);
3164 last_insn = emit_move_insn (xpart, ypart);
3167 seq = get_insns ();
3168 end_sequence ();
3170 /* Show the output dies here. This is necessary for SUBREGs
3171 of pseudos since we cannot track their lifetimes correctly;
3172 hard regs shouldn't appear here except as return values.
3173 We never want to emit such a clobber after reload. */
3174 if (x != y
3175 && ! (reload_in_progress || reload_completed)
3176 && need_clobber != 0)
3177 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3179 emit_insn (seq);
3181 return last_insn;
3183 else
3184 abort ();
3187 /* If Y is representable exactly in a narrower mode, and the target can
3188 perform the extension directly from constant or memory, then emit the
3189 move as an extension. */
3191 static rtx
3192 compress_float_constant (rtx x, rtx y)
3194 enum machine_mode dstmode = GET_MODE (x);
3195 enum machine_mode orig_srcmode = GET_MODE (y);
3196 enum machine_mode srcmode;
3197 REAL_VALUE_TYPE r;
3199 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3201 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3202 srcmode != orig_srcmode;
3203 srcmode = GET_MODE_WIDER_MODE (srcmode))
3205 enum insn_code ic;
3206 rtx trunc_y, last_insn;
3208 /* Skip if the target can't extend this way. */
3209 ic = can_extend_p (dstmode, srcmode, 0);
3210 if (ic == CODE_FOR_nothing)
3211 continue;
3213 /* Skip if the narrowed value isn't exact. */
3214 if (! exact_real_truncate (srcmode, &r))
3215 continue;
3217 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3219 if (LEGITIMATE_CONSTANT_P (trunc_y))
3221 /* Skip if the target needs extra instructions to perform
3222 the extension. */
3223 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3224 continue;
3226 else if (float_extend_from_mem[dstmode][srcmode])
3227 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3228 else
3229 continue;
3231 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3232 last_insn = get_last_insn ();
3234 if (GET_CODE (x) == REG)
3235 set_unique_reg_note (last_insn, REG_EQUAL, y);
3237 return last_insn;
3240 return NULL_RTX;
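/* Worked example (illustrative): for a DFmode constant 1.5, which is
   exactly representable in SFmode, a target with an extendsfdf2
   pattern that accepts the narrowed operand gets a single
   float_extend from the SFmode constant instead of a load of the
   full DFmode constant.  A constant such as 0.1 is not exact in
   SFmode, so the loop falls through and NULL_RTX is returned.  */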
3243 /* Pushing data onto the stack. */
3245 /* Push a block of length SIZE (perhaps variable)
3246 and return an rtx to address the beginning of the block.
3247 Note that it is not possible for the value returned to be a QUEUED.
3248 The value may be virtual_outgoing_args_rtx.
3250 EXTRA is the number of bytes of padding to push in addition to SIZE.
3251 BELOW nonzero means this padding comes at low addresses;
3252 otherwise, the padding comes at high addresses. */
3255 push_block (rtx size, int extra, int below)
3257 rtx temp;
3259 size = convert_modes (Pmode, ptr_mode, size, 1);
3260 if (CONSTANT_P (size))
3261 anti_adjust_stack (plus_constant (size, extra));
3262 else if (GET_CODE (size) == REG && extra == 0)
3263 anti_adjust_stack (size);
3264 else
3266 temp = copy_to_mode_reg (Pmode, size);
3267 if (extra != 0)
3268 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3269 temp, 0, OPTAB_LIB_WIDEN);
3270 anti_adjust_stack (temp);
3273 #ifndef STACK_GROWS_DOWNWARD
3274 if (0)
3275 #else
3276 if (1)
3277 #endif
3279 temp = virtual_outgoing_args_rtx;
3280 if (extra != 0 && below)
3281 temp = plus_constant (temp, extra);
3283 else
3285 if (GET_CODE (size) == CONST_INT)
3286 temp = plus_constant (virtual_outgoing_args_rtx,
3287 -INTVAL (size) - (below ? 0 : extra));
3288 else if (extra != 0 && !below)
3289 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3290 negate_rtx (Pmode, plus_constant (size, extra)));
3291 else
3292 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3293 negate_rtx (Pmode, size));
3296 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3299 #ifdef PUSH_ROUNDING
3301 /* Emit single push insn. */
3303 static void
3304 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3306 rtx dest_addr;
3307 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3308 rtx dest;
3309 enum insn_code icode;
3310 insn_operand_predicate_fn pred;
3312 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3313 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3314 a MEM representing the push operation to the move expander. */
3315 icode = push_optab->handlers[(int) mode].insn_code;
3316 if (icode != CODE_FOR_nothing)
3318 if (((pred = insn_data[(int) icode].operand[0].predicate)
3319 && !((*pred) (x, mode))))
3320 x = force_reg (mode, x);
3321 emit_insn (GEN_FCN (icode) (x));
3322 return;
3324 if (GET_MODE_SIZE (mode) == rounded_size)
3325 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3326 /* If we are to pad downward, adjust the stack pointer first and
3327 then store X into the stack location using an offset. This is
3328 because emit_move_insn does not know how to pad; it does not have
3329 access to type. */
3330 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3332 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3333 HOST_WIDE_INT offset;
3335 emit_move_insn (stack_pointer_rtx,
3336 expand_binop (Pmode,
3337 #ifdef STACK_GROWS_DOWNWARD
3338 sub_optab,
3339 #else
3340 add_optab,
3341 #endif
3342 stack_pointer_rtx,
3343 GEN_INT (rounded_size),
3344 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3346 offset = (HOST_WIDE_INT) padding_size;
3347 #ifdef STACK_GROWS_DOWNWARD
3348 if (STACK_PUSH_CODE == POST_DEC)
3349 /* We have already decremented the stack pointer, so get the
3350 previous value. */
3351 offset += (HOST_WIDE_INT) rounded_size;
3352 #else
3353 if (STACK_PUSH_CODE == POST_INC)
3354 /* We have already incremented the stack pointer, so get the
3355 previous value. */
3356 offset -= (HOST_WIDE_INT) rounded_size;
3357 #endif
3358 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3360 else
3362 #ifdef STACK_GROWS_DOWNWARD
3363 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3364 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3365 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3366 #else
3367 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3368 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3369 GEN_INT (rounded_size));
3370 #endif
3371 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3374 dest = gen_rtx_MEM (mode, dest_addr);
3376 if (type != 0)
3378 set_mem_attributes (dest, type, 1);
3380 if (flag_optimize_sibling_calls)
3381 /* Function incoming arguments may overlap with sibling call
3382 outgoing arguments and we cannot allow reordering of reads
3383 from function arguments with stores to outgoing arguments
3384 of sibling calls. */
3385 set_mem_alias_set (dest, 0);
3387 emit_move_insn (dest, x);
3389 #endif
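/* Worked example for the downward-padding path above (illustrative,
   assuming STACK_PUSH_CODE is PRE_DEC on a downward-growing stack):
   pushing an HImode value when PUSH_ROUNDING rounds 2 bytes up to 4
   gives rounded_size = 4 and padding_size = 2.  The stack pointer is
   first decremented by 4, then the value is stored at sp + 2, leaving
   the two bytes of padding below the data.  */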
3391 /* Generate code to push X onto the stack, assuming it has mode MODE and
3392 type TYPE.
3393 MODE is redundant except when X is a CONST_INT (since they don't
3394 carry mode info).
3395 SIZE is an rtx for the size of data to be copied (in bytes),
3396 needed only if X is BLKmode.
3398 ALIGN (in bits) is maximum alignment we can assume.
3400 If PARTIAL and REG are both nonzero, then copy that many of the first
3401 words of X into registers starting with REG, and push the rest of X.
3402 The amount of space pushed is decreased by PARTIAL words,
3403 rounded *down* to a multiple of PARM_BOUNDARY.
3404 REG must be a hard register in this case.
3405 If REG is zero but PARTIAL is not, take all other actions for an
3406 argument partially in registers, but do not actually load any
3407 registers.
3409 EXTRA is the amount in bytes of extra space to leave next to this arg.
3410 This is ignored if an argument block has already been allocated.
3412 On a machine that lacks real push insns, ARGS_ADDR is the address of
3413 the bottom of the argument block for this call. We use indexing off there
3414 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3415 argument block has not been preallocated.
3417 ARGS_SO_FAR is the size of args previously pushed for this call.
3419 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3420 for arguments passed in registers. If nonzero, it will be the number
3421 of bytes required. */
3423 void
3424 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3425 unsigned int align, int partial, rtx reg, int extra,
3426 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3427 rtx alignment_pad)
3429 rtx xinner;
3430 enum direction stack_direction
3431 #ifdef STACK_GROWS_DOWNWARD
3432 = downward;
3433 #else
3434 = upward;
3435 #endif
3437 /* Decide where to pad the argument: `downward' for below,
3438 `upward' for above, or `none' for don't pad it.
3439 Default is below for small data on big-endian machines; else above. */
3440 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3442 /* Invert direction if stack is post-decrement.
3443 FIXME: why? */
3444 if (STACK_PUSH_CODE == POST_DEC)
3445 if (where_pad != none)
3446 where_pad = (where_pad == downward ? upward : downward);
3448 xinner = x = protect_from_queue (x, 0);
3450 if (mode == BLKmode)
3452 /* Copy a block into the stack, entirely or partially. */
3454 rtx temp;
3455 int used = partial * UNITS_PER_WORD;
3456 int offset;
3457 int skip;
3459 if (reg && GET_CODE (reg) == PARALLEL)
3461 /* Use the size of the elt to compute offset. */
3462 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3463 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3464 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3466 else
3467 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3469 if (size == 0)
3470 abort ();
3472 used -= offset;
3474 /* USED is now the # of bytes we need not copy to the stack
3475 because registers will take care of them. */
3477 if (partial != 0)
3478 xinner = adjust_address (xinner, BLKmode, used);
3480 /* If the partial register-part of the arg counts in its stack size,
3481 skip the part of stack space corresponding to the registers.
3482 Otherwise, start copying to the beginning of the stack space,
3483 by setting SKIP to 0. */
3484 skip = (reg_parm_stack_space == 0) ? 0 : used;
3486 #ifdef PUSH_ROUNDING
3487 /* Do it with several push insns if that doesn't take lots of insns
3488 and if there is no difficulty with push insns that skip bytes
3489 on the stack for alignment purposes. */
3490 if (args_addr == 0
3491 && PUSH_ARGS
3492 && GET_CODE (size) == CONST_INT
3493 && skip == 0
3494 && MEM_ALIGN (xinner) >= align
3495 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3496 /* Here we avoid the case of a structure whose weak alignment
3497 forces many pushes of a small amount of data,
3498 and such small pushes do rounding that causes trouble. */
3499 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3500 || align >= BIGGEST_ALIGNMENT
3501 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3502 == (align / BITS_PER_UNIT)))
3503 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3505 /* Push padding now if padding above and stack grows down,
3506 or if padding below and stack grows up.
3507 But if space already allocated, this has already been done. */
3508 if (extra && args_addr == 0
3509 && where_pad != none && where_pad != stack_direction)
3510 anti_adjust_stack (GEN_INT (extra));
3512 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3514 else
3515 #endif /* PUSH_ROUNDING */
3517 rtx target;
3519 /* Otherwise make space on the stack and copy the data
3520 to the address of that space. */
3522 /* Deduct words put into registers from the size we must copy. */
3523 if (partial != 0)
3525 if (GET_CODE (size) == CONST_INT)
3526 size = GEN_INT (INTVAL (size) - used);
3527 else
3528 size = expand_binop (GET_MODE (size), sub_optab, size,
3529 GEN_INT (used), NULL_RTX, 0,
3530 OPTAB_LIB_WIDEN);
3533 /* Get the address of the stack space.
3534 In this case, we do not deal with EXTRA separately.
3535 A single stack adjust will do. */
3536 if (! args_addr)
3538 temp = push_block (size, extra, where_pad == downward);
3539 extra = 0;
3541 else if (GET_CODE (args_so_far) == CONST_INT)
3542 temp = memory_address (BLKmode,
3543 plus_constant (args_addr,
3544 skip + INTVAL (args_so_far)));
3545 else
3546 temp = memory_address (BLKmode,
3547 plus_constant (gen_rtx_PLUS (Pmode,
3548 args_addr,
3549 args_so_far),
3550 skip));
3552 if (!ACCUMULATE_OUTGOING_ARGS)
3554 /* If the source is referenced relative to the stack pointer,
3555 copy it to another register to stabilize it. We do not need
3556 to do this if we know that we won't be changing sp. */
3558 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3559 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3560 temp = copy_to_reg (temp);
3563 target = gen_rtx_MEM (BLKmode, temp);
3565 if (type != 0)
3567 set_mem_attributes (target, type, 1);
3568 /* Function incoming arguments may overlap with sibling call
3569 outgoing arguments and we cannot allow reordering of reads
3570 from function arguments with stores to outgoing arguments
3571 of sibling calls. */
3572 set_mem_alias_set (target, 0);
3575 /* ALIGN may well be stricter than TYPE's alignment, e.g. due to
3576 PARM_BOUNDARY. Assume the caller isn't lying. */
3577 set_mem_align (target, align);
3579 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3582 else if (partial > 0)
3584 /* Scalar partly in registers. */
3586 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3587 int i;
3588 int not_stack;
3589 /* # words of start of argument
3590 that we must make space for but need not store. */
3591 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3592 int args_offset = INTVAL (args_so_far);
3593 int skip;
3595 /* Push padding now if padding above and stack grows down,
3596 or if padding below and stack grows up.
3597 But if space already allocated, this has already been done. */
3598 if (extra && args_addr == 0
3599 && where_pad != none && where_pad != stack_direction)
3600 anti_adjust_stack (GEN_INT (extra));
3602 /* If we make space by pushing it, we might as well push
3603 the real data. Otherwise, we can leave OFFSET nonzero
3604 and leave the space uninitialized. */
3605 if (args_addr == 0)
3606 offset = 0;
3608 /* Now NOT_STACK gets the number of words that we don't need to
3609 allocate on the stack. */
3610 not_stack = partial - offset;
3612 /* If the partial register-part of the arg counts in its stack size,
3613 skip the part of stack space corresponding to the registers.
3614 Otherwise, start copying to the beginning of the stack space,
3615 by setting SKIP to 0. */
3616 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3618 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3619 x = validize_mem (force_const_mem (mode, x));
3621 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3622 SUBREGs of such registers are not allowed. */
3623 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3624 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3625 x = copy_to_reg (x);
3627 /* Loop over all the words allocated on the stack for this arg. */
3628 /* We can do it by words, because any scalar bigger than a word
3629 has a size that is a multiple of a word. */
3630 #ifndef PUSH_ARGS_REVERSED
3631 for (i = not_stack; i < size; i++)
3632 #else
3633 for (i = size - 1; i >= not_stack; i--)
3634 #endif
3635 if (i >= not_stack + offset)
3636 emit_push_insn (operand_subword_force (x, i, mode),
3637 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3638 0, args_addr,
3639 GEN_INT (args_offset + ((i - not_stack + skip)
3640 * UNITS_PER_WORD)),
3641 reg_parm_stack_space, alignment_pad);
3643 else
3645 rtx addr;
3646 rtx dest;
3648 /* Push padding now if padding above and stack grows down,
3649 or if padding below and stack grows up.
3650 But if space already allocated, this has already been done. */
3651 if (extra && args_addr == 0
3652 && where_pad != none && where_pad != stack_direction)
3653 anti_adjust_stack (GEN_INT (extra));
3655 #ifdef PUSH_ROUNDING
3656 if (args_addr == 0 && PUSH_ARGS)
3657 emit_single_push_insn (mode, x, type);
3658 else
3659 #endif
3661 if (GET_CODE (args_so_far) == CONST_INT)
3662 addr
3663 = memory_address (mode,
3664 plus_constant (args_addr,
3665 INTVAL (args_so_far)));
3666 else
3667 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3668 args_so_far));
3669 dest = gen_rtx_MEM (mode, addr);
3670 if (type != 0)
3672 set_mem_attributes (dest, type, 1);
3673 /* Function incoming arguments may overlap with sibling call
3674 outgoing arguments and we cannot allow reordering of reads
3675 from function arguments with stores to outgoing arguments
3676 of sibling calls. */
3677 set_mem_alias_set (dest, 0);
3680 emit_move_insn (dest, x);
3684 /* If part should go in registers, copy that part
3685 into the appropriate registers. Do this now, at the end,
3686 since mem-to-mem copies above may do function calls. */
3687 if (partial > 0 && reg != 0)
3689 /* Handle calls that pass values in multiple non-contiguous locations.
3690 The Irix 6 ABI has examples of this. */
3691 if (GET_CODE (reg) == PARALLEL)
3692 emit_group_load (reg, x, type, -1);
3693 else
3694 move_block_to_reg (REGNO (reg), x, partial, mode);
3697 if (extra && args_addr == 0 && where_pad == stack_direction)
3698 anti_adjust_stack (GEN_INT (extra));
3700 if (alignment_pad && args_addr == 0)
3701 anti_adjust_stack (alignment_pad);
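/* Illustrative sketch, not part of the original file: the minimal
   form of a call to the routine above for a word-sized argument on a
   target with push instructions (ARGS_ADDR of zero), with no partial
   register passing and no preallocated argument block.  */
#if 0
static void
example_push_word (rtx arg, rtx args_so_far)
{
  emit_push_insn (arg, word_mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
                  0, NULL_RTX, 0, NULL_RTX, args_so_far, 0, NULL_RTX);
}
#endif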
3704 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3705 operations. */
3707 static rtx
3708 get_subtarget (rtx x)
3710 return ((x == 0
3711 /* Only registers can be subtargets. */
3712 || GET_CODE (x) != REG
3713 /* If the register is readonly, it can't be set more than once. */
3714 || RTX_UNCHANGING_P (x)
3715 /* Don't use hard regs to avoid extending their life. */
3716 || REGNO (x) < FIRST_PSEUDO_REGISTER
3717 /* Avoid subtargets inside loops,
3718 since they hide some invariant expressions. */
3719 || preserve_subexpressions_p ())
3720 ? 0 : x);
3723 /* Expand an assignment that stores the value of FROM into TO.
3724 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3725 (This may contain a QUEUED rtx;
3726 if the value is constant, this rtx is a constant.)
3727 Otherwise, the returned value is NULL_RTX. */
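/* Illustrative sketch (editorial note, not part of the original source):
   a front end that has built trees LHS and RHS for a simple assignment
   statement and does not need the value back would call

     expand_assignment (lhs, rhs, 0);

   while an embedded assignment such as `a = (b = c)' passes a nonzero
   WANT_VALUE for the inner assignment so its rtx can feed the outer one.
   LHS and RHS are placeholders for the caller's trees.  */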
3729 rtx
3730 expand_assignment (tree to, tree from, int want_value)
3732 rtx to_rtx = 0;
3733 rtx result;
3735 /* Don't crash if the lhs of the assignment was erroneous. */
3737 if (TREE_CODE (to) == ERROR_MARK)
3739 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3740 return want_value ? result : NULL_RTX;
3743 /* Assignment of a structure component needs special treatment
3744 if the structure component's rtx is not simply a MEM.
3745 Assignment of an array element at a constant index, and assignment of
3746 an array element in an unaligned packed structure field, have the same
3747 problem. */
3749 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3750 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3751 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3753 enum machine_mode mode1;
3754 HOST_WIDE_INT bitsize, bitpos;
3755 rtx orig_to_rtx;
3756 tree offset;
3757 int unsignedp;
3758 int volatilep = 0;
3759 tree tem;
3761 push_temp_slots ();
3762 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3763 &unsignedp, &volatilep);
3765 /* If we are going to use store_bit_field and extract_bit_field,
3766 make sure to_rtx will be safe for multiple use. */
3768 if (mode1 == VOIDmode && want_value)
3769 tem = stabilize_reference (tem);
3771 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3773 if (offset != 0)
3775 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3777 if (GET_CODE (to_rtx) != MEM)
3778 abort ();
3780 #ifdef POINTERS_EXTEND_UNSIGNED
3781 if (GET_MODE (offset_rtx) != Pmode)
3782 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3783 #else
3784 if (GET_MODE (offset_rtx) != ptr_mode)
3785 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3786 #endif
3788 /* A constant address in TO_RTX can have VOIDmode; we must not try
3789 to call force_reg in that case, so avoid it. */
3790 if (GET_CODE (to_rtx) == MEM
3791 && GET_MODE (to_rtx) == BLKmode
3792 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3793 && bitsize > 0
3794 && (bitpos % bitsize) == 0
3795 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3796 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3798 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3799 bitpos = 0;
3802 to_rtx = offset_address (to_rtx, offset_rtx,
3803 highest_pow2_factor_for_type (TREE_TYPE (to),
3804 offset));
3807 if (GET_CODE (to_rtx) == MEM)
3809 /* If the field is at offset zero, we could have been given the
3810 DECL_RTL of the parent struct. Don't munge it. */
3811 to_rtx = shallow_copy_rtx (to_rtx);
3813 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3816 /* Deal with volatile and readonly fields. The former is only done
3817 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3818 if (volatilep && GET_CODE (to_rtx) == MEM)
3820 if (to_rtx == orig_to_rtx)
3821 to_rtx = copy_rtx (to_rtx);
3822 MEM_VOLATILE_P (to_rtx) = 1;
3825 if (TREE_CODE (to) == COMPONENT_REF
3826 && TREE_READONLY (TREE_OPERAND (to, 1))
3827 /* We can't assert that a MEM won't be set more than once
3828 if the component is not addressable because another
3829 non-addressable component may be referenced by the same MEM. */
3830 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3832 if (to_rtx == orig_to_rtx)
3833 to_rtx = copy_rtx (to_rtx);
3834 RTX_UNCHANGING_P (to_rtx) = 1;
3837 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3839 if (to_rtx == orig_to_rtx)
3840 to_rtx = copy_rtx (to_rtx);
3841 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3844 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3845 (want_value
3846 /* Spurious cast for HPUX compiler. */
3847 ? ((enum machine_mode)
3848 TYPE_MODE (TREE_TYPE (to)))
3849 : VOIDmode),
3850 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3852 preserve_temp_slots (result);
3853 free_temp_slots ();
3854 pop_temp_slots ();
3856 /* If the value is meaningful, convert RESULT to the proper mode.
3857 Otherwise, return nothing. */
3858 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3859 TYPE_MODE (TREE_TYPE (from)),
3860 result,
3861 TREE_UNSIGNED (TREE_TYPE (to)))
3862 : NULL_RTX);
3865 /* If the rhs is a function call and its value is not an aggregate,
3866 call the function before we start to compute the lhs.
3867 This is needed for correct code for cases such as
3868 val = setjmp (buf) on machines where reference to val
3869 requires loading up part of an address in a separate insn.
3871 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3872 since it might be a promoted variable where the zero- or sign-extension
3873 needs to be done. Handling this in the normal way is safe because no
3874 computation is done before the call. */
3875 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3876 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3877 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3878 && GET_CODE (DECL_RTL (to)) == REG))
3880 rtx value;
3882 push_temp_slots ();
3883 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3884 if (to_rtx == 0)
3885 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3887 /* Handle calls that return values in multiple non-contiguous locations.
3888 The Irix 6 ABI has examples of this. */
3889 if (GET_CODE (to_rtx) == PARALLEL)
3890 emit_group_load (to_rtx, value, TREE_TYPE (from),
3891 int_size_in_bytes (TREE_TYPE (from)));
3892 else if (GET_MODE (to_rtx) == BLKmode)
3893 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3894 else
3896 if (POINTER_TYPE_P (TREE_TYPE (to)))
3897 value = convert_memory_address (GET_MODE (to_rtx), value);
3898 emit_move_insn (to_rtx, value);
3900 preserve_temp_slots (to_rtx);
3901 free_temp_slots ();
3902 pop_temp_slots ();
3903 return want_value ? to_rtx : NULL_RTX;
3906 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3907 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3909 if (to_rtx == 0)
3910 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3912 /* Don't move directly into a return register. */
3913 if (TREE_CODE (to) == RESULT_DECL
3914 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3916 rtx temp;
3918 push_temp_slots ();
3919 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3921 if (GET_CODE (to_rtx) == PARALLEL)
3922 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3923 int_size_in_bytes (TREE_TYPE (from)));
3924 else
3925 emit_move_insn (to_rtx, temp);
3927 preserve_temp_slots (to_rtx);
3928 free_temp_slots ();
3929 pop_temp_slots ();
3930 return want_value ? to_rtx : NULL_RTX;
3933 /* In case we are returning the contents of an object which overlaps
3934 the place the value is being stored, use a safe function when copying
3935 a value through a pointer into a structure value return block. */
3936 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3937 && current_function_returns_struct
3938 && !current_function_returns_pcc_struct)
3940 rtx from_rtx, size;
3942 push_temp_slots ();
3943 size = expr_size (from);
3944 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3946 if (TARGET_MEM_FUNCTIONS)
3947 emit_library_call (memmove_libfunc, LCT_NORMAL,
3948 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3949 XEXP (from_rtx, 0), Pmode,
3950 convert_to_mode (TYPE_MODE (sizetype),
3951 size, TREE_UNSIGNED (sizetype)),
3952 TYPE_MODE (sizetype));
3953 else
3954 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3955 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3956 XEXP (to_rtx, 0), Pmode,
3957 convert_to_mode (TYPE_MODE (integer_type_node),
3958 size,
3959 TREE_UNSIGNED (integer_type_node)),
3960 TYPE_MODE (integer_type_node));
3962 preserve_temp_slots (to_rtx);
3963 free_temp_slots ();
3964 pop_temp_slots ();
3965 return want_value ? to_rtx : NULL_RTX;
3968 /* Compute FROM and store the value in the rtx we got. */
3970 push_temp_slots ();
3971 result = store_expr (from, to_rtx, want_value);
3972 preserve_temp_slots (result);
3973 free_temp_slots ();
3974 pop_temp_slots ();
3975 return want_value ? result : NULL_RTX;
3978 /* Generate code for computing expression EXP,
3979 and storing the value into TARGET.
3980 TARGET may contain a QUEUED rtx.
3982 If WANT_VALUE & 1 is nonzero, return a copy of the value
3983 not in TARGET, so that we can be sure to use the proper
3984 value in a containing expression even if TARGET has something
3985 else stored in it. If possible, we copy the value through a pseudo
3986 and return that pseudo. Or, if the value is constant, we try to
3987 return the constant. In some cases, we return a pseudo
3988 copied *from* TARGET.
3990 If the mode is BLKmode then we may return TARGET itself.
3991 It turns out that in BLKmode it doesn't cause a problem,
3992 because C has no operators that could combine two different
3993 assignments into the same BLKmode object with different values
3994 with no sequence point. Will other languages need this to
3995 be more thorough?
3997 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3998 to catch quickly any cases where the caller uses the value
3999 and fails to set WANT_VALUE.
4001 If WANT_VALUE & 2 is set, this is a store into a call param on the
4002 stack, and block moves may need to be treated specially. */
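/* Illustrative sketch (editorial note, not part of the original source):
   the two low bits of WANT_VALUE are independent flags, so typical calls
   look like

     store_expr (exp, target, 0);   -- store only, value not needed
     store_expr (exp, target, 1);   -- store and return a usable rtx
     store_expr (exp, target, 2);   -- store into a stack call parameter

   and a caller needing both behaviors passes 1 | 2.  */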
4004 rtx
4005 store_expr (tree exp, rtx target, int want_value)
4007 rtx temp;
4008 rtx alt_rtl = NULL_RTX;
4009 int dont_return_target = 0;
4010 int dont_store_target = 0;
4012 if (VOID_TYPE_P (TREE_TYPE (exp)))
4014 /* C++ can generate ?: expressions with a throw expression in one
4015 branch and an rvalue in the other. Here, we resolve attempts to
4016 store the throw expression's nonexistent result. */
4017 if (want_value)
4018 abort ();
4019 expand_expr (exp, const0_rtx, VOIDmode, 0);
4020 return NULL_RTX;
4022 if (TREE_CODE (exp) == COMPOUND_EXPR)
4024 /* Perform first part of compound expression, then assign from second
4025 part. */
4026 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4027 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4028 emit_queue ();
4029 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4031 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4033 /* For conditional expression, get safe form of the target. Then
4034 test the condition, doing the appropriate assignment on either
4035 side. This avoids the creation of unnecessary temporaries.
4036 For non-BLKmode, it is more efficient not to do this. */
4038 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4040 emit_queue ();
4041 target = protect_from_queue (target, 1);
4043 do_pending_stack_adjust ();
4044 NO_DEFER_POP;
4045 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4046 start_cleanup_deferral ();
4047 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4048 end_cleanup_deferral ();
4049 emit_queue ();
4050 emit_jump_insn (gen_jump (lab2));
4051 emit_barrier ();
4052 emit_label (lab1);
4053 start_cleanup_deferral ();
4054 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4055 end_cleanup_deferral ();
4056 emit_queue ();
4057 emit_label (lab2);
4058 OK_DEFER_POP;
4060 return want_value & 1 ? target : NULL_RTX;
4062 else if (queued_subexp_p (target))
4063 /* If target contains a postincrement, let's not risk
4064 using it as the place to generate the rhs. */
4066 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4068 /* Expand EXP into a new pseudo. */
4069 temp = gen_reg_rtx (GET_MODE (target));
4070 temp = expand_expr (exp, temp, GET_MODE (target),
4071 (want_value & 2
4072 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4074 else
4075 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4076 (want_value & 2
4077 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4079 /* If target is volatile, ANSI requires accessing the value
4080 *from* the target, if it is accessed. So make that happen.
4081 In no case return the target itself. */
4082 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4083 dont_return_target = 1;
4085 else if ((want_value & 1) != 0
4086 && GET_CODE (target) == MEM
4087 && ! MEM_VOLATILE_P (target)
4088 && GET_MODE (target) != BLKmode)
4089 /* If target is in memory and caller wants value in a register instead,
4090 arrange that. Pass TARGET as target for expand_expr so that,
4091 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4092 We know expand_expr will not use the target in that case.
4093 Don't do this if TARGET is volatile because we are supposed
4094 to write it and then read it. */
4096 temp = expand_expr (exp, target, GET_MODE (target),
4097 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4098 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4100 /* If TEMP is already in the desired TARGET, only copy it from
4101 memory and don't store it there again. */
4102 if (temp == target
4103 || (rtx_equal_p (temp, target)
4104 && ! side_effects_p (temp) && ! side_effects_p (target)))
4105 dont_store_target = 1;
4106 temp = copy_to_reg (temp);
4108 dont_return_target = 1;
4110 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4111 /* If this is a scalar in a register that is stored in a wider mode
4112 than the declared mode, compute the result into its declared mode
4113 and then convert to the wider mode. Our value is the computed
4114 expression. */
4116 rtx inner_target = 0;
4118 /* If we don't want a value, we can do the conversion inside EXP,
4119 which will often result in some optimizations. Do the conversion
4120 in two steps: first change the signedness, if needed, then
4121 the extend. But don't do this if the type of EXP is a subtype
4122 of something else since then the conversion might involve
4123 more than just converting modes. */
4124 if ((want_value & 1) == 0
4125 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4126 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4128 if (TREE_UNSIGNED (TREE_TYPE (exp))
4129 != SUBREG_PROMOTED_UNSIGNED_P (target))
4130 exp = convert
4131 ((*lang_hooks.types.signed_or_unsigned_type)
4132 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4134 exp = convert ((*lang_hooks.types.type_for_mode)
4135 (GET_MODE (SUBREG_REG (target)),
4136 SUBREG_PROMOTED_UNSIGNED_P (target)),
4137 exp);
4139 inner_target = SUBREG_REG (target);
4142 temp = expand_expr (exp, inner_target, VOIDmode,
4143 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4145 /* If TEMP is a MEM and we want a result value, make the access
4146 now so it gets done only once. Strictly speaking, this is
4147 only necessary if the MEM is volatile, or if the address
4148 overlaps TARGET. But not performing the load twice also
4149 reduces the amount of rtl we generate and then have to CSE. */
4150 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4151 temp = copy_to_reg (temp);
4153 /* If TEMP is a VOIDmode constant, use convert_modes to make
4154 sure that we properly convert it. */
4155 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4157 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4158 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4159 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4160 GET_MODE (target), temp,
4161 SUBREG_PROMOTED_UNSIGNED_P (target));
4164 convert_move (SUBREG_REG (target), temp,
4165 SUBREG_PROMOTED_UNSIGNED_P (target));
4167 /* If we promoted a constant, change the mode back down to match
4168 target. Otherwise, the caller might get confused by a result whose
4169 mode is larger than expected. */
4171 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4173 if (GET_MODE (temp) != VOIDmode)
4175 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4176 SUBREG_PROMOTED_VAR_P (temp) = 1;
4177 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4178 SUBREG_PROMOTED_UNSIGNED_P (target));
4180 else
4181 temp = convert_modes (GET_MODE (target),
4182 GET_MODE (SUBREG_REG (target)),
4183 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4186 return want_value & 1 ? temp : NULL_RTX;
4188 else
4190 temp = expand_expr_real (exp, target, GET_MODE (target),
4191 (want_value & 2
4192 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4193 &alt_rtl);
4194 /* Return TARGET if it's a specified hardware register.
4195 If TARGET is a volatile mem ref, either return TARGET
4196 or return a reg copied *from* TARGET; ANSI requires this.
4198 Otherwise, if TEMP is not TARGET, return TEMP
4199 if it is constant (for efficiency),
4200 or if we really want the correct value. */
4201 if (!(target && GET_CODE (target) == REG
4202 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4203 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4204 && ! rtx_equal_p (temp, target)
4205 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4206 dont_return_target = 1;
4209 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4210 the same as that of TARGET, adjust the constant. This is needed, for
4211 example, in case it is a CONST_DOUBLE and we want only a word-sized
4212 value. */
4213 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4214 && TREE_CODE (exp) != ERROR_MARK
4215 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4216 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4217 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4219 /* If value was not generated in the target, store it there.
4220 Convert the value to TARGET's type first if necessary.
4221 If TEMP and TARGET compare equal according to rtx_equal_p, but
4222 one or both of them are volatile memory refs, we have to distinguish
4223 two cases:
4224 - expand_expr has used TARGET. In this case, we must not generate
4225 another copy. This can be detected by TARGET being equal according
4226 to == .
4227 - expand_expr has not used TARGET - that means that the source just
4228 happens to have the same RTX form. Since temp will have been created
4229 by expand_expr, it will compare unequal according to == .
4230 We must generate a copy in this case, to reach the correct number
4231 of volatile memory references. */
4233 if ((! rtx_equal_p (temp, target)
4234 || (temp != target && (side_effects_p (temp)
4235 || side_effects_p (target))))
4236 && TREE_CODE (exp) != ERROR_MARK
4237 && ! dont_store_target
4238 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4239 but TARGET is not a valid memory reference, TEMP will differ
4240 from TARGET although it is really the same location. */
4241 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4242 /* If there's nothing to copy, don't bother. Don't call expr_size
4243 unless necessary, because the expr_size hook of some front-ends (C++)
4244 aborts on objects that are not supposed to be bit-copied or
4245 bit-initialized. */
4246 && expr_size (exp) != const0_rtx)
4248 target = protect_from_queue (target, 1);
4249 if (GET_MODE (temp) != GET_MODE (target)
4250 && GET_MODE (temp) != VOIDmode)
4252 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4253 if (dont_return_target)
4255 /* In this case, we will return TEMP,
4256 so make sure it has the proper mode.
4257 But don't forget to store the value into TARGET. */
4258 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4259 emit_move_insn (target, temp);
4261 else
4262 convert_move (target, temp, unsignedp);
4265 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4267 /* Handle copying a string constant into an array. The string
4268 constant may be shorter than the array. So copy just the string's
4269 actual length, and clear the rest. First get the size of the data
4270 type of the string, which is actually the size of the target. */
4271 rtx size = expr_size (exp);
4273 if (GET_CODE (size) == CONST_INT
4274 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4275 emit_block_move (target, temp, size,
4276 (want_value & 2
4277 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4278 else
4280 /* Compute the size of the data to copy from the string. */
4281 tree copy_size
4282 = size_binop (MIN_EXPR,
4283 make_tree (sizetype, size),
4284 size_int (TREE_STRING_LENGTH (exp)));
4285 rtx copy_size_rtx
4286 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4287 (want_value & 2
4288 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4289 rtx label = 0;
4291 /* Copy that much. */
4292 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4293 TREE_UNSIGNED (sizetype));
4294 emit_block_move (target, temp, copy_size_rtx,
4295 (want_value & 2
4296 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4298 /* Figure out how much is left in TARGET that we have to clear.
4299 Do all calculations in ptr_mode. */
4300 if (GET_CODE (copy_size_rtx) == CONST_INT)
4302 size = plus_constant (size, -INTVAL (copy_size_rtx));
4303 target = adjust_address (target, BLKmode,
4304 INTVAL (copy_size_rtx));
4306 else
4308 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4309 copy_size_rtx, NULL_RTX, 0,
4310 OPTAB_LIB_WIDEN);
4312 #ifdef POINTERS_EXTEND_UNSIGNED
4313 if (GET_MODE (copy_size_rtx) != Pmode)
4314 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4315 TREE_UNSIGNED (sizetype));
4316 #endif
4318 target = offset_address (target, copy_size_rtx,
4319 highest_pow2_factor (copy_size));
4320 label = gen_label_rtx ();
4321 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4322 GET_MODE (size), 0, label);
4325 if (size != const0_rtx)
4326 clear_storage (target, size);
4328 if (label)
4329 emit_label (label);
4332 /* Handle calls that return values in multiple non-contiguous locations.
4333 The Irix 6 ABI has examples of this. */
4334 else if (GET_CODE (target) == PARALLEL)
4335 emit_group_load (target, temp, TREE_TYPE (exp),
4336 int_size_in_bytes (TREE_TYPE (exp)));
4337 else if (GET_MODE (temp) == BLKmode)
4338 emit_block_move (target, temp, expr_size (exp),
4339 (want_value & 2
4340 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4341 else
4343 temp = force_operand (temp, target);
4344 if (temp != target)
4345 emit_move_insn (target, temp);
4349 /* If we don't want a value, return NULL_RTX. */
4350 if ((want_value & 1) == 0)
4351 return NULL_RTX;
4353 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4354 ??? The latter test doesn't seem to make sense. */
4355 else if (dont_return_target && GET_CODE (temp) != MEM)
4356 return temp;
4358 /* Return TARGET itself if it is a hard register. */
4359 else if ((want_value & 1) != 0
4360 && GET_MODE (target) != BLKmode
4361 && ! (GET_CODE (target) == REG
4362 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4363 return copy_to_reg (target);
4365 else
4366 return target;
4369 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4371 static int
4372 is_zeros_p (tree exp)
4374 tree elt;
4376 switch (TREE_CODE (exp))
4378 case CONVERT_EXPR:
4379 case NOP_EXPR:
4380 case NON_LVALUE_EXPR:
4381 case VIEW_CONVERT_EXPR:
4382 return is_zeros_p (TREE_OPERAND (exp, 0));
4384 case INTEGER_CST:
4385 return integer_zerop (exp);
4387 case COMPLEX_CST:
4388 return
4389 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4391 case REAL_CST:
4392 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4394 case VECTOR_CST:
4395 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4396 elt = TREE_CHAIN (elt))
4397 if (!is_zeros_p (TREE_VALUE (elt)))
4398 return 0;
4400 return 1;
4402 case CONSTRUCTOR:
4403 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4404 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4405 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4406 if (! is_zeros_p (TREE_VALUE (elt)))
4407 return 0;
4409 return 1;
4411 default:
4412 return 0;
4416 /* Return 1 if EXP contains mostly (3/4) zeros. */
4418 static int
4419 mostly_zeros_p (tree exp)
4421 if (TREE_CODE (exp) == CONSTRUCTOR)
4423 int elts = 0, zeros = 0;
4424 tree elt = CONSTRUCTOR_ELTS (exp);
4425 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4427 /* If there are no ranges of true bits, it is all zero. */
4428 return elt == NULL_TREE;
4430 for (; elt; elt = TREE_CHAIN (elt))
4432 /* We do not handle the case where the index is a RANGE_EXPR,
4433 so the statistic will be somewhat inaccurate.
4434 We do make a more accurate count in store_constructor itself,
4435 and since this function is only used for nested array elements,
4436 this should be close enough. */
4437 if (mostly_zeros_p (TREE_VALUE (elt)))
4438 zeros++;
4439 elts++;
4442 return 4 * zeros >= 3 * elts;
4445 return is_zeros_p (exp);
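/* Illustrative example (editorial note, not part of the original source):
   for a CONSTRUCTOR with 8 elements of which 6 are zero, the test above
   is 4 * 6 >= 3 * 8, which holds, so mostly_zeros_p returns 1 and
   store_constructor will prefer to clear the whole object before storing
   the nonzero elements.  */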
4448 /* Helper function for store_constructor.
4449 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4450 TYPE is the type of the CONSTRUCTOR, not the element type.
4451 CLEARED is as for store_constructor.
4452 ALIAS_SET is the alias set to use for any stores.
4454 This provides a recursive shortcut back to store_constructor when it isn't
4455 necessary to go through store_field. This is so that we can pass through
4456 the cleared field to let store_constructor know that we may not have to
4457 clear a substructure if the outer structure has already been cleared. */
4459 static void
4460 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4461 HOST_WIDE_INT bitpos, enum machine_mode mode,
4462 tree exp, tree type, int cleared, int alias_set)
4464 if (TREE_CODE (exp) == CONSTRUCTOR
4465 && bitpos % BITS_PER_UNIT == 0
4466 /* If we have a nonzero bitpos for a register target, then we just
4467 let store_field do the bitfield handling. This is unlikely to
4468 generate unnecessary clear instructions anyway. */
4469 && (bitpos == 0 || GET_CODE (target) == MEM))
4471 if (GET_CODE (target) == MEM)
4472 target
4473 = adjust_address (target,
4474 GET_MODE (target) == BLKmode
4475 || 0 != (bitpos
4476 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4477 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4480 /* Update the alias set, if required. */
4481 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4482 && MEM_ALIAS_SET (target) != 0)
4484 target = copy_rtx (target);
4485 set_mem_alias_set (target, alias_set);
4488 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4490 else
4491 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4492 alias_set);
4495 /* Store the value of constructor EXP into the rtx TARGET.
4496 TARGET is either a REG or a MEM; we know it cannot conflict, since
4497 safe_from_p has been called.
4498 CLEARED is true if TARGET is known to have been zero'd.
4499 SIZE is the number of bytes of TARGET we are allowed to modify: this
4500 may not be the same as the size of EXP if we are assigning to a field
4501 which has been packed to exclude padding bits. */
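/* Illustrative example (editorial note, not part of the original source):
   for a local initialization such as

     int x[8] = { 1 };

   the CONSTRUCTOR names only element 0, so the code below notices the
   missing elements, clears the whole of X first, and then emits a single
   store of 1 into element 0; the remaining elements are covered by the
   clearing and are never stored individually.  */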
4503 static void
4504 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4506 tree type = TREE_TYPE (exp);
4507 #ifdef WORD_REGISTER_OPERATIONS
4508 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4509 #endif
4511 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4512 || TREE_CODE (type) == QUAL_UNION_TYPE)
4514 tree elt;
4516 /* If size is zero or the target is already cleared, do nothing. */
4517 if (size == 0 || cleared)
4518 cleared = 1;
4519 /* We either clear the aggregate or indicate the value is dead. */
4520 else if ((TREE_CODE (type) == UNION_TYPE
4521 || TREE_CODE (type) == QUAL_UNION_TYPE)
4522 && ! CONSTRUCTOR_ELTS (exp))
4523 /* If the constructor is empty, clear the union. */
4525 clear_storage (target, expr_size (exp));
4526 cleared = 1;
4529 /* If we are building a static constructor into a register,
4530 set the initial value as zero so we can fold the value into
4531 a constant. But if more than one register is involved,
4532 this probably loses. */
4533 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4534 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4536 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4537 cleared = 1;
4540 /* If the constructor has fewer fields than the structure
4541 or if we are initializing the structure to mostly zeros,
4542 clear the whole structure first. Don't do this if TARGET is a
4543 register whose mode size isn't equal to SIZE since clear_storage
4544 can't handle this case. */
4545 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4546 || mostly_zeros_p (exp))
4547 && (GET_CODE (target) != REG
4548 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4549 == size)))
4551 rtx xtarget = target;
4553 if (readonly_fields_p (type))
4555 xtarget = copy_rtx (xtarget);
4556 RTX_UNCHANGING_P (xtarget) = 1;
4559 clear_storage (xtarget, GEN_INT (size));
4560 cleared = 1;
4563 if (! cleared)
4564 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4566 /* Store each element of the constructor into
4567 the corresponding field of TARGET. */
4569 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4571 tree field = TREE_PURPOSE (elt);
4572 tree value = TREE_VALUE (elt);
4573 enum machine_mode mode;
4574 HOST_WIDE_INT bitsize;
4575 HOST_WIDE_INT bitpos = 0;
4576 tree offset;
4577 rtx to_rtx = target;
4579 /* Just ignore missing fields.
4580 We cleared the whole structure, above,
4581 if any fields are missing. */
4582 if (field == 0)
4583 continue;
4585 if (cleared && is_zeros_p (value))
4586 continue;
4588 if (host_integerp (DECL_SIZE (field), 1))
4589 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4590 else
4591 bitsize = -1;
4593 mode = DECL_MODE (field);
4594 if (DECL_BIT_FIELD (field))
4595 mode = VOIDmode;
4597 offset = DECL_FIELD_OFFSET (field);
4598 if (host_integerp (offset, 0)
4599 && host_integerp (bit_position (field), 0))
4601 bitpos = int_bit_position (field);
4602 offset = 0;
4604 else
4605 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4607 if (offset)
4609 rtx offset_rtx;
4611 if (CONTAINS_PLACEHOLDER_P (offset))
4612 offset = build (WITH_RECORD_EXPR, sizetype,
4613 offset, make_tree (TREE_TYPE (exp), target));
4615 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4616 if (GET_CODE (to_rtx) != MEM)
4617 abort ();
4619 #ifdef POINTERS_EXTEND_UNSIGNED
4620 if (GET_MODE (offset_rtx) != Pmode)
4621 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4622 #else
4623 if (GET_MODE (offset_rtx) != ptr_mode)
4624 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4625 #endif
4627 to_rtx = offset_address (to_rtx, offset_rtx,
4628 highest_pow2_factor (offset));
4631 if (TREE_READONLY (field))
4633 if (GET_CODE (to_rtx) == MEM)
4634 to_rtx = copy_rtx (to_rtx);
4636 RTX_UNCHANGING_P (to_rtx) = 1;
4639 #ifdef WORD_REGISTER_OPERATIONS
4640 /* If this initializes a field that is smaller than a word, at the
4641 start of a word, try to widen it to a full word.
4642 This special case allows us to output C++ member function
4643 initializations in a form that the optimizers can understand. */
4644 if (GET_CODE (target) == REG
4645 && bitsize < BITS_PER_WORD
4646 && bitpos % BITS_PER_WORD == 0
4647 && GET_MODE_CLASS (mode) == MODE_INT
4648 && TREE_CODE (value) == INTEGER_CST
4649 && exp_size >= 0
4650 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4652 tree type = TREE_TYPE (value);
4654 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4656 type = (*lang_hooks.types.type_for_size)
4657 (BITS_PER_WORD, TREE_UNSIGNED (type));
4658 value = convert (type, value);
4661 if (BYTES_BIG_ENDIAN)
4662 value
4663 = fold (build (LSHIFT_EXPR, type, value,
4664 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4665 bitsize = BITS_PER_WORD;
4666 mode = word_mode;
4668 #endif
4670 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4671 && DECL_NONADDRESSABLE_P (field))
4673 to_rtx = copy_rtx (to_rtx);
4674 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4677 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4678 value, type, cleared,
4679 get_alias_set (TREE_TYPE (field)));
4682 else if (TREE_CODE (type) == ARRAY_TYPE
4683 || TREE_CODE (type) == VECTOR_TYPE)
4685 tree elt;
4686 int i;
4687 int need_to_clear;
4688 tree domain = TYPE_DOMAIN (type);
4689 tree elttype = TREE_TYPE (type);
4690 int const_bounds_p;
4691 HOST_WIDE_INT minelt = 0;
4692 HOST_WIDE_INT maxelt = 0;
4693 int icode = 0;
4694 rtx *vector = NULL;
4695 int elt_size = 0;
4696 unsigned n_elts = 0;
4698 /* Vectors are like arrays, but the domain is stored via an array
4699 type indirectly. */
4700 if (TREE_CODE (type) == VECTOR_TYPE)
4702 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4703 the same field as TYPE_DOMAIN, we are not guaranteed that
4704 it always will. */
4705 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4706 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4707 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4709 enum machine_mode mode = GET_MODE (target);
4711 icode = (int) vec_init_optab->handlers[mode].insn_code;
4712 if (icode != CODE_FOR_nothing)
4714 unsigned int i;
4716 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4717 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4718 vector = alloca (n_elts * sizeof (rtx));
4719 for (i = 0; i < n_elts; i++)
4720 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4725 const_bounds_p = (TYPE_MIN_VALUE (domain)
4726 && TYPE_MAX_VALUE (domain)
4727 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4728 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4730 /* If we have constant bounds for the range of the type, get them. */
4731 if (const_bounds_p)
4733 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4734 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4737 /* If the constructor has fewer elements than the array,
4738 clear the whole array first. Similarly if this is
4739 a static constructor of a non-BLKmode object. */
4740 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4741 need_to_clear = 1;
4742 else
4744 HOST_WIDE_INT count = 0, zero_count = 0;
4745 need_to_clear = ! const_bounds_p;
4747 /* This loop is a more accurate version of the loop in
4748 mostly_zeros_p (it handles RANGE_EXPR in an index).
4749 It is also needed to check for missing elements. */
4750 for (elt = CONSTRUCTOR_ELTS (exp);
4751 elt != NULL_TREE && ! need_to_clear;
4752 elt = TREE_CHAIN (elt))
4754 tree index = TREE_PURPOSE (elt);
4755 HOST_WIDE_INT this_node_count;
4757 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4759 tree lo_index = TREE_OPERAND (index, 0);
4760 tree hi_index = TREE_OPERAND (index, 1);
4762 if (! host_integerp (lo_index, 1)
4763 || ! host_integerp (hi_index, 1))
4765 need_to_clear = 1;
4766 break;
4769 this_node_count = (tree_low_cst (hi_index, 1)
4770 - tree_low_cst (lo_index, 1) + 1);
4772 else
4773 this_node_count = 1;
4775 count += this_node_count;
4776 if (mostly_zeros_p (TREE_VALUE (elt)))
4777 zero_count += this_node_count;
4780 /* Clear the entire array first if there are any missing elements,
4781 or if the incidence of zero elements is >= 75%. */
4782 if (! need_to_clear
4783 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4784 need_to_clear = 1;
4787 if (need_to_clear && size > 0 && !vector)
4789 if (! cleared)
4791 if (REG_P (target))
4792 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4793 else
4794 clear_storage (target, GEN_INT (size));
4796 cleared = 1;
4798 else if (REG_P (target))
4799 /* Inform later passes that the old value is dead. */
4800 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4802 /* Store each element of the constructor into
4803 the corresponding element of TARGET, determined
4804 by counting the elements. */
4805 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4806 elt;
4807 elt = TREE_CHAIN (elt), i++)
4809 enum machine_mode mode;
4810 HOST_WIDE_INT bitsize;
4811 HOST_WIDE_INT bitpos;
4812 int unsignedp;
4813 tree value = TREE_VALUE (elt);
4814 tree index = TREE_PURPOSE (elt);
4815 rtx xtarget = target;
4817 if (cleared && is_zeros_p (value))
4818 continue;
4820 unsignedp = TREE_UNSIGNED (elttype);
4821 mode = TYPE_MODE (elttype);
4822 if (mode == BLKmode)
4823 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4824 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4825 : -1);
4826 else
4827 bitsize = GET_MODE_BITSIZE (mode);
4829 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4831 tree lo_index = TREE_OPERAND (index, 0);
4832 tree hi_index = TREE_OPERAND (index, 1);
4833 rtx index_r, pos_rtx, loop_end;
4834 struct nesting *loop;
4835 HOST_WIDE_INT lo, hi, count;
4836 tree position;
4838 if (vector)
4839 abort ();
4841 /* If the range is constant and "small", unroll the loop. */
4842 if (const_bounds_p
4843 && host_integerp (lo_index, 0)
4844 && host_integerp (hi_index, 0)
4845 && (lo = tree_low_cst (lo_index, 0),
4846 hi = tree_low_cst (hi_index, 0),
4847 count = hi - lo + 1,
4848 (GET_CODE (target) != MEM
4849 || count <= 2
4850 || (host_integerp (TYPE_SIZE (elttype), 1)
4851 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4852 <= 40 * 8)))))
4854 lo -= minelt; hi -= minelt;
4855 for (; lo <= hi; lo++)
4857 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4859 if (GET_CODE (target) == MEM
4860 && !MEM_KEEP_ALIAS_SET_P (target)
4861 && TREE_CODE (type) == ARRAY_TYPE
4862 && TYPE_NONALIASED_COMPONENT (type))
4864 target = copy_rtx (target);
4865 MEM_KEEP_ALIAS_SET_P (target) = 1;
4868 store_constructor_field
4869 (target, bitsize, bitpos, mode, value, type, cleared,
4870 get_alias_set (elttype));
4873 else
4875 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4876 loop_end = gen_label_rtx ();
4878 unsignedp = TREE_UNSIGNED (domain);
4880 index = build_decl (VAR_DECL, NULL_TREE, domain);
4882 index_r
4883 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4884 &unsignedp, 0));
4885 SET_DECL_RTL (index, index_r);
4886 if (TREE_CODE (value) == SAVE_EXPR
4887 && SAVE_EXPR_RTL (value) == 0)
4889 /* Make sure value gets expanded once before the
4890 loop. */
4891 expand_expr (value, const0_rtx, VOIDmode, 0);
4892 emit_queue ();
4894 store_expr (lo_index, index_r, 0);
4895 loop = expand_start_loop (0);
4897 /* Assign value to element index. */
4898 position
4899 = convert (ssizetype,
4900 fold (build (MINUS_EXPR, TREE_TYPE (index),
4901 index, TYPE_MIN_VALUE (domain))));
4902 position = size_binop (MULT_EXPR, position,
4903 convert (ssizetype,
4904 TYPE_SIZE_UNIT (elttype)));
4906 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4907 xtarget = offset_address (target, pos_rtx,
4908 highest_pow2_factor (position));
4909 xtarget = adjust_address (xtarget, mode, 0);
4910 if (TREE_CODE (value) == CONSTRUCTOR)
4911 store_constructor (value, xtarget, cleared,
4912 bitsize / BITS_PER_UNIT);
4913 else
4914 store_expr (value, xtarget, 0);
4916 expand_exit_loop_if_false (loop,
4917 build (LT_EXPR, integer_type_node,
4918 index, hi_index));
4920 expand_increment (build (PREINCREMENT_EXPR,
4921 TREE_TYPE (index),
4922 index, integer_one_node), 0, 0);
4923 expand_end_loop ();
4924 emit_label (loop_end);
4927 else if ((index != 0 && ! host_integerp (index, 0))
4928 || ! host_integerp (TYPE_SIZE (elttype), 1))
4930 tree position;
4932 if (vector)
4933 abort ();
4935 if (index == 0)
4936 index = ssize_int (1);
4938 if (minelt)
4939 index = convert (ssizetype,
4940 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4941 TYPE_MIN_VALUE (domain))));
4943 position = size_binop (MULT_EXPR, index,
4944 convert (ssizetype,
4945 TYPE_SIZE_UNIT (elttype)));
4946 xtarget = offset_address (target,
4947 expand_expr (position, 0, VOIDmode, 0),
4948 highest_pow2_factor (position));
4949 xtarget = adjust_address (xtarget, mode, 0);
4950 store_expr (value, xtarget, 0);
4952 else if (vector)
4954 int pos;
4956 if (index != 0)
4957 pos = tree_low_cst (index, 0) - minelt;
4958 else
4959 pos = i;
4960 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4962 else
4964 if (index != 0)
4965 bitpos = ((tree_low_cst (index, 0) - minelt)
4966 * tree_low_cst (TYPE_SIZE (elttype), 1));
4967 else
4968 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4970 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4971 && TREE_CODE (type) == ARRAY_TYPE
4972 && TYPE_NONALIASED_COMPONENT (type))
4974 target = copy_rtx (target);
4975 MEM_KEEP_ALIAS_SET_P (target) = 1;
4977 store_constructor_field (target, bitsize, bitpos, mode, value,
4978 type, cleared, get_alias_set (elttype));
4981 if (vector)
4983 emit_insn (GEN_FCN (icode) (target,
4984 gen_rtx_PARALLEL (GET_MODE (target),
4985 gen_rtvec_v (n_elts, vector))));
4989 /* Set constructor assignments. */
4990 else if (TREE_CODE (type) == SET_TYPE)
4992 tree elt = CONSTRUCTOR_ELTS (exp);
4993 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4994 tree domain = TYPE_DOMAIN (type);
4995 tree domain_min, domain_max, bitlength;
4997 /* The default implementation strategy is to extract the constant
4998 parts of the constructor, use that to initialize the target,
4999 and then "or" in whatever non-constant ranges we need in addition.
5001 If a large set is all zero or all ones, it is
5002 probably better to set it using memset (if available) or bzero.
5003 Also, if a large set has just a single range, it may be better to
5004 first clear the set (using bzero/memset) and then set the bits we
5005 want. */
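/* Illustrative example (editorial note, not part of the original source):
   with a set word size of 8 bits on a !BYTES_BIG_ENDIAN target, a
   constant set containing members 1 and 5 is assembled by the loop below
   into the single word

     (1 << 1) | (1 << 5) == 0x22

   and stored with one move instruction.  */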
5007 /* Check for all zeros. */
5008 if (elt == NULL_TREE && size > 0)
5010 if (!cleared)
5011 clear_storage (target, GEN_INT (size));
5012 return;
5015 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5016 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5017 bitlength = size_binop (PLUS_EXPR,
5018 size_diffop (domain_max, domain_min),
5019 ssize_int (1));
5021 nbits = tree_low_cst (bitlength, 1);
5023 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5024 are "complicated" (more than one range), initialize (the
5025 constant parts) by copying from a constant. */
5026 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5027 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5029 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5030 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5031 char *bit_buffer = alloca (nbits);
5032 HOST_WIDE_INT word = 0;
5033 unsigned int bit_pos = 0;
5034 unsigned int ibit = 0;
5035 unsigned int offset = 0; /* In bytes from beginning of set. */
5037 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5038 for (;;)
5040 if (bit_buffer[ibit])
5042 if (BYTES_BIG_ENDIAN)
5043 word |= (1 << (set_word_size - 1 - bit_pos));
5044 else
5045 word |= 1 << bit_pos;
5048 bit_pos++; ibit++;
5049 if (bit_pos >= set_word_size || ibit == nbits)
5051 if (word != 0 || ! cleared)
5053 rtx datum = GEN_INT (word);
5054 rtx to_rtx;
5056 /* The assumption here is that it is safe to use
5057 XEXP if the set is multi-word, but not if
5058 it's single-word. */
5059 if (GET_CODE (target) == MEM)
5060 to_rtx = adjust_address (target, mode, offset);
5061 else if (offset == 0)
5062 to_rtx = target;
5063 else
5064 abort ();
5065 emit_move_insn (to_rtx, datum);
5068 if (ibit == nbits)
5069 break;
5070 word = 0;
5071 bit_pos = 0;
5072 offset += set_word_size / BITS_PER_UNIT;
5076 else if (!cleared)
5077 /* Don't bother clearing storage if the set is all ones. */
5078 if (TREE_CHAIN (elt) != NULL_TREE
5079 || (TREE_PURPOSE (elt) == NULL_TREE
5080 ? nbits != 1
5081 : ( ! host_integerp (TREE_VALUE (elt), 0)
5082 || ! host_integerp (TREE_PURPOSE (elt), 0)
5083 || (tree_low_cst (TREE_VALUE (elt), 0)
5084 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5085 != (HOST_WIDE_INT) nbits))))
5086 clear_storage (target, expr_size (exp));
5088 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5090 /* Start of range of element or NULL. */
5091 tree startbit = TREE_PURPOSE (elt);
5092 /* End of range of element, or element value. */
5093 tree endbit = TREE_VALUE (elt);
5094 HOST_WIDE_INT startb, endb;
5095 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5097 bitlength_rtx = expand_expr (bitlength,
5098 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5100 /* Handle non-range tuple element like [ expr ]. */
5101 if (startbit == NULL_TREE)
5103 startbit = save_expr (endbit);
5104 endbit = startbit;
5107 startbit = convert (sizetype, startbit);
5108 endbit = convert (sizetype, endbit);
5109 if (! integer_zerop (domain_min))
5111 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5112 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5114 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5115 EXPAND_CONST_ADDRESS);
5116 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5117 EXPAND_CONST_ADDRESS);
5119 if (REG_P (target))
5121 targetx
5122 = assign_temp
5123 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5124 (GET_MODE (target), 0),
5125 TYPE_QUAL_CONST)),
5126 0, 1, 1);
5127 emit_move_insn (targetx, target);
5130 else if (GET_CODE (target) == MEM)
5131 targetx = target;
5132 else
5133 abort ();
5135 /* Optimization: If startbit and endbit are constants divisible
5136 by BITS_PER_UNIT, call memset instead. */
5137 if (TARGET_MEM_FUNCTIONS
5138 && TREE_CODE (startbit) == INTEGER_CST
5139 && TREE_CODE (endbit) == INTEGER_CST
5140 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5141 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5143 emit_library_call (memset_libfunc, LCT_NORMAL,
5144 VOIDmode, 3,
5145 plus_constant (XEXP (targetx, 0),
5146 startb / BITS_PER_UNIT),
5147 Pmode,
5148 constm1_rtx, TYPE_MODE (integer_type_node),
5149 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5150 TYPE_MODE (sizetype));
5152 else
5153 emit_library_call (setbits_libfunc, LCT_NORMAL,
5154 VOIDmode, 4, XEXP (targetx, 0),
5155 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5156 startbit_rtx, TYPE_MODE (sizetype),
5157 endbit_rtx, TYPE_MODE (sizetype));
5159 if (REG_P (target))
5160 emit_move_insn (target, targetx);
5164 else
5165 abort ();
5168 /* Store the value of EXP (an expression tree)
5169 into a subfield of TARGET which has mode MODE and occupies
5170 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5171 If MODE is VOIDmode, it means that we are storing into a bit-field.
5173 If VALUE_MODE is VOIDmode, return nothing in particular.
5174 UNSIGNEDP is not used in this case.
5176 Otherwise, return an rtx for the value stored. This rtx
5177 has mode VALUE_MODE if that is convenient to do.
5178 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5180 TYPE is the type of the underlying object.
5182 ALIAS_SET is the alias set for the destination. This value will
5183 (in general) be different from that for TARGET, since TARGET is a
5184 reference to the containing structure. */
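/* Illustrative example (editorial note, not part of the original source):
   an assignment to a C bit-field member such as

     struct s { unsigned int f : 3; } *p;
     p->f = v;

   arrives here with MODE == VOIDmode and BITSIZE == 3, so the value is
   written with store_bit_field below rather than through an ordinary
   memory reference.  */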
5186 static rtx
5187 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5188 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5189 int unsignedp, tree type, int alias_set)
5191 HOST_WIDE_INT width_mask = 0;
5193 if (TREE_CODE (exp) == ERROR_MARK)
5194 return const0_rtx;
5196 /* If we have nothing to store, do nothing unless the expression has
5197 side-effects. */
5198 if (bitsize == 0)
5199 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5200 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5201 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5203 /* If we are storing into an unaligned field of an aligned union that is
5204 in a register, we may have the mode of TARGET being an integer mode but
5205 MODE == BLKmode. In that case, get an aligned object whose size and
5206 alignment are the same as TARGET and store TARGET into it (we can avoid
5207 the store if the field being stored is the entire width of TARGET). Then
5208 call ourselves recursively to store the field into a BLKmode version of
5209 that object. Finally, load from the object into TARGET. This is not
5210 very efficient in general, but should only be slightly more expensive
5211 than the otherwise-required unaligned accesses. Perhaps this can be
5212 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5213 twice, once with emit_move_insn and once via store_field. */
5215 if (mode == BLKmode
5216 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5218 rtx object = assign_temp (type, 0, 1, 1);
5219 rtx blk_object = adjust_address (object, BLKmode, 0);
5221 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5222 emit_move_insn (object, target);
5224 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5225 alias_set);
5227 emit_move_insn (target, object);
5229 /* We want to return the BLKmode version of the data. */
5230 return blk_object;
5233 if (GET_CODE (target) == CONCAT)
5235 /* We're storing into a struct containing a single __complex. */
5237 if (bitpos != 0)
5238 abort ();
5239 return store_expr (exp, target, 0);
5242 /* If the structure is in a register or if the component
5243 is a bit field, we cannot use addressing to access it.
5244 Use bit-field techniques or SUBREG to store in it. */
5246 if (mode == VOIDmode
5247 || (mode != BLKmode && ! direct_store[(int) mode]
5248 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5249 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5250 || GET_CODE (target) == REG
5251 || GET_CODE (target) == SUBREG
5252 /* If the field isn't aligned enough to store as an ordinary memref,
5253 store it as a bit field. */
5254 || (mode != BLKmode
5255 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5256 || bitpos % GET_MODE_ALIGNMENT (mode))
5257 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5258 || (bitpos % BITS_PER_UNIT != 0)))
5259 /* If the RHS and field are a constant size and the size of the
5260 RHS isn't the same size as the bitfield, we must use bitfield
5261 operations. */
5262 || (bitsize >= 0
5263 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5264 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5266 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5268 /* If BITSIZE is narrower than the size of the type of EXP
5269 we will be narrowing TEMP. Normally, what's wanted are the
5270 low-order bits. However, if EXP's type is a record and this is
5271 a big-endian machine, we want the upper BITSIZE bits. */
5272 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5273 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5274 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5275 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5276 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5277 - bitsize),
5278 NULL_RTX, 1);
5280 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5281 MODE. */
5282 if (mode != VOIDmode && mode != BLKmode
5283 && mode != TYPE_MODE (TREE_TYPE (exp)))
5284 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5286 /* If the modes of TARGET and TEMP are both BLKmode, both
5287 must be in memory and BITPOS must be aligned on a byte
5288 boundary. If so, we simply do a block copy. */
5289 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5291 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5292 || bitpos % BITS_PER_UNIT != 0)
5293 abort ();
5295 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5296 emit_block_move (target, temp,
5297 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5298 / BITS_PER_UNIT),
5299 BLOCK_OP_NORMAL);
5301 return value_mode == VOIDmode ? const0_rtx : target;
5304 /* Store the value in the bitfield. */
5305 store_bit_field (target, bitsize, bitpos, mode, temp,
5306 int_size_in_bytes (type));
5308 if (value_mode != VOIDmode)
5310 /* The caller wants an rtx for the value.
5311 If possible, avoid refetching from the bitfield itself. */
5312 if (width_mask != 0
5313 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5315 tree count;
5316 enum machine_mode tmode;
5318 tmode = GET_MODE (temp);
5319 if (tmode == VOIDmode)
5320 tmode = value_mode;
5322 if (unsignedp)
5323 return expand_and (tmode, temp,
5324 gen_int_mode (width_mask, tmode),
5325 NULL_RTX);
5327 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5328 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5329 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5332 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5333 NULL_RTX, value_mode, VOIDmode,
5334 int_size_in_bytes (type));
5336 return const0_rtx;
5338 else
5340 rtx addr = XEXP (target, 0);
5341 rtx to_rtx = target;
5343 /* If a value is wanted, it must be the lhs;
5344 so make the address stable for multiple use. */
5346 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5347 && ! CONSTANT_ADDRESS_P (addr)
5348 /* A frame-pointer reference is already stable. */
5349 && ! (GET_CODE (addr) == PLUS
5350 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5351 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5352 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5353 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5355 /* Now build a reference to just the desired component. */
5357 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5359 if (to_rtx == target)
5360 to_rtx = copy_rtx (to_rtx);
5362 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5363 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5364 set_mem_alias_set (to_rtx, alias_set);
5366 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5370 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5371 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5372 codes and find the ultimate containing object, which we return.
5374 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5375 bit position, and *PUNSIGNEDP to the signedness of the field.
5376 If the position of the field is variable, we store a tree
5377 giving the variable offset (in units) in *POFFSET.
5378 This offset is in addition to the bit position.
5379 If the position is not variable, we store 0 in *POFFSET.
5381 If any of the extraction expressions is volatile,
5382 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5384 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5385 is a mode that can be used to access the field. In that case, *PBITSIZE
5386 is redundant.
5388 If the field describes a variable-sized object, *PMODE is set to
5389 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5390 this case, but the address of the object can be found. */
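/* Illustrative example (editorial note, not part of the original source):
   given a reference tree for something like

     x.a[i].b

   get_inner_reference peels the COMPONENT_REF and ARRAY_REF nodes and
   returns the VAR_DECL for X; the constant part of the displacement comes
   back in *PBITPOS and the part that depends on I comes back as a tree in
   *POFFSET.  */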
5392 tree
5393 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5394 HOST_WIDE_INT *pbitpos, tree *poffset,
5395 enum machine_mode *pmode, int *punsignedp,
5396 int *pvolatilep)
5398 tree size_tree = 0;
5399 enum machine_mode mode = VOIDmode;
5400 tree offset = size_zero_node;
5401 tree bit_offset = bitsize_zero_node;
5402 tree placeholder_ptr = 0;
5403 tree tem;
5405 /* First get the mode, signedness, and size. We do this from just the
5406 outermost expression. */
5407 if (TREE_CODE (exp) == COMPONENT_REF)
5409 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5410 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5411 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5413 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5415 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5417 size_tree = TREE_OPERAND (exp, 1);
5418 *punsignedp = TREE_UNSIGNED (exp);
5420 else
5422 mode = TYPE_MODE (TREE_TYPE (exp));
5423 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5425 if (mode == BLKmode)
5426 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5427 else
5428 *pbitsize = GET_MODE_BITSIZE (mode);
5431 if (size_tree != 0)
5433 if (! host_integerp (size_tree, 1))
5434 mode = BLKmode, *pbitsize = -1;
5435 else
5436 *pbitsize = tree_low_cst (size_tree, 1);
5439 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5440 and find the ultimate containing object. */
5441 while (1)
5443 if (TREE_CODE (exp) == BIT_FIELD_REF)
5444 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5445 else if (TREE_CODE (exp) == COMPONENT_REF)
5447 tree field = TREE_OPERAND (exp, 1);
5448 tree this_offset = DECL_FIELD_OFFSET (field);
5450 /* If this field hasn't been filled in yet, don't go
5451 past it. This should only happen when folding expressions
5452 made during type construction. */
5453 if (this_offset == 0)
5454 break;
5455 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5456 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5458 offset = size_binop (PLUS_EXPR, offset, this_offset);
5459 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5460 DECL_FIELD_BIT_OFFSET (field));
5462 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5465 else if (TREE_CODE (exp) == ARRAY_REF
5466 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5468 tree index = TREE_OPERAND (exp, 1);
5469 tree array = TREE_OPERAND (exp, 0);
5470 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5471 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5472 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5474 /* We assume all arrays have sizes that are a multiple of a byte.
5475 First subtract the lower bound, if any, in the type of the
5476 index, then convert to sizetype and multiply by the size of the
5477 array element. */
5478 if (low_bound != 0 && ! integer_zerop (low_bound))
5479 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5480 index, low_bound));
5482 /* If the index has a self-referential type, pass it to a
5483 WITH_RECORD_EXPR; if the component size is self-referential,
5484 pass our component to one. */
5485 if (CONTAINS_PLACEHOLDER_P (index))
5486 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5487 if (CONTAINS_PLACEHOLDER_P (unit_size))
5488 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5490 offset = size_binop (PLUS_EXPR, offset,
5491 size_binop (MULT_EXPR,
5492 convert (sizetype, index),
5493 unit_size));
5496 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5498 tree new = find_placeholder (exp, &placeholder_ptr);
5500 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5501 We might have been called from tree optimization where we
5502 haven't set up an object yet. */
5503 if (new == 0)
5504 break;
5505 else
5506 exp = new;
5508 continue;
5511 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5512 conversions that don't change the mode, and all view conversions
5513 except those that need to "step up" the alignment. */
5514 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5515 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5516 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5517 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5518 && STRICT_ALIGNMENT
5519 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5520 < BIGGEST_ALIGNMENT)
5521 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5522 || TYPE_ALIGN_OK (TREE_TYPE
5523 (TREE_OPERAND (exp, 0))))))
5524 && ! ((TREE_CODE (exp) == NOP_EXPR
5525 || TREE_CODE (exp) == CONVERT_EXPR)
5526 && (TYPE_MODE (TREE_TYPE (exp))
5527 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5528 break;
5530 /* If any reference in the chain is volatile, the effect is volatile. */
5531 if (TREE_THIS_VOLATILE (exp))
5532 *pvolatilep = 1;
5534 exp = TREE_OPERAND (exp, 0);
5537 /* If OFFSET is constant, see if we can return the whole thing as a
5538 constant bit position. Otherwise, split it up. */
5539 if (host_integerp (offset, 0)
5540 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5541 bitsize_unit_node))
5542 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5543 && host_integerp (tem, 0))
5544 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5545 else
5546 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5548 *pmode = mode;
5549 return exp;
5552 /* Return 1 if T is an expression that get_inner_reference handles. */
5555 handled_component_p (tree t)
5557 switch (TREE_CODE (t))
5559 case BIT_FIELD_REF:
5560 case COMPONENT_REF:
5561 case ARRAY_REF:
5562 case ARRAY_RANGE_REF:
5563 case NON_LVALUE_EXPR:
5564 case VIEW_CONVERT_EXPR:
5565 return 1;
5567 /* ??? Sure they are handled, but get_inner_reference may return
5568 a different PBITSIZE, depending upon whether the expression is
5569 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5570 case NOP_EXPR:
5571 case CONVERT_EXPR:
5572 return (TYPE_MODE (TREE_TYPE (t))
5573 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5575 default:
5576 return 0;
5580 /* Given an rtx VALUE that may contain additions and multiplications, return
5581 an equivalent value that just refers to a register, memory, or constant.
5582 This is done by generating instructions to perform the arithmetic and
5583 returning a pseudo-register containing the value.
5585 The returned value may be a REG, SUBREG, MEM or constant. */
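/* A minimal usage sketch (hypothetical example; TARGET may be NULL_RTX to
   let the routine pick a pseudo, and BASE_REG/DISP are invented names):

       rtx addr = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (disp));
       rtx op   = force_operand (addr, NULL_RTX);

   The PLUS is turned into an add instruction when needed, so OP is then a
   register, memory, or constant suitable for use as an insn operand.  */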
5588 force_operand (rtx value, rtx target)
5590 rtx op1, op2;
5591 /* Use subtarget as the target for operand 0 of a binary operation. */
5592 rtx subtarget = get_subtarget (target);
5593 enum rtx_code code = GET_CODE (value);
5595 /* Check for subreg applied to an expression produced by loop optimizer. */
5596 if (code == SUBREG
5597 && GET_CODE (SUBREG_REG (value)) != REG
5598 && GET_CODE (SUBREG_REG (value)) != MEM)
5600 value = simplify_gen_subreg (GET_MODE (value),
5601 force_reg (GET_MODE (SUBREG_REG (value)),
5602 force_operand (SUBREG_REG (value),
5603 NULL_RTX)),
5604 GET_MODE (SUBREG_REG (value)),
5605 SUBREG_BYTE (value));
5606 code = GET_CODE (value);
5609 /* Check for a PIC address load. */
5610 if ((code == PLUS || code == MINUS)
5611 && XEXP (value, 0) == pic_offset_table_rtx
5612 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5613 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5614 || GET_CODE (XEXP (value, 1)) == CONST))
5616 if (!subtarget)
5617 subtarget = gen_reg_rtx (GET_MODE (value));
5618 emit_move_insn (subtarget, value);
5619 return subtarget;
5622 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5624 if (!target)
5625 target = gen_reg_rtx (GET_MODE (value));
5626 convert_move (target, force_operand (XEXP (value, 0), NULL),
5627 code == ZERO_EXTEND);
5628 return target;
5631 if (ARITHMETIC_P (value))
5633 op2 = XEXP (value, 1);
5634 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5635 subtarget = 0;
5636 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5638 code = PLUS;
5639 op2 = negate_rtx (GET_MODE (value), op2);
5642 /* Check for an addition with OP2 a constant integer and our first
5643 operand a PLUS of a virtual register and something else. In that
5644 case, we want to emit the sum of the virtual register and the
5645 constant first and then add the other value. This allows virtual
5646 register instantiation to simply modify the constant rather than
5647 creating another one around this addition. */
5648 if (code == PLUS && GET_CODE (op2) == CONST_INT
5649 && GET_CODE (XEXP (value, 0)) == PLUS
5650 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5651 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5652 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5654 rtx temp = expand_simple_binop (GET_MODE (value), code,
5655 XEXP (XEXP (value, 0), 0), op2,
5656 subtarget, 0, OPTAB_LIB_WIDEN);
5657 return expand_simple_binop (GET_MODE (value), code, temp,
5658 force_operand (XEXP (XEXP (value,
5659 0), 1), 0),
5660 target, 0, OPTAB_LIB_WIDEN);
5663 op1 = force_operand (XEXP (value, 0), subtarget);
5664 op2 = force_operand (op2, NULL_RTX);
5665 switch (code)
5667 case MULT:
5668 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5669 case DIV:
5670 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5671 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5672 target, 1, OPTAB_LIB_WIDEN);
5673 else
5674 return expand_divmod (0,
5675 FLOAT_MODE_P (GET_MODE (value))
5676 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5677 GET_MODE (value), op1, op2, target, 0);
5678 break;
5679 case MOD:
5680 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5681 target, 0);
5682 break;
5683 case UDIV:
5684 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5685 target, 1);
5686 break;
5687 case UMOD:
5688 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5689 target, 1);
5690 break;
5691 case ASHIFTRT:
5692 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5693 target, 0, OPTAB_LIB_WIDEN);
5694 break;
5695 default:
5696 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5697 target, 1, OPTAB_LIB_WIDEN);
5700 if (UNARY_P (value))
5702 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5703 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5706 #ifdef INSN_SCHEDULING
5707 /* On machines that have insn scheduling, we want all memory references to be
5708 explicit, so we need to deal with such paradoxical SUBREGs. */
5709 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5710 && (GET_MODE_SIZE (GET_MODE (value))
5711 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5712 value
5713 = simplify_gen_subreg (GET_MODE (value),
5714 force_reg (GET_MODE (SUBREG_REG (value)),
5715 force_operand (SUBREG_REG (value),
5716 NULL_RTX)),
5717 GET_MODE (SUBREG_REG (value)),
5718 SUBREG_BYTE (value));
5719 #endif
5721 return value;
5724 /* Subroutine of expand_expr: return nonzero iff there is no way that
5725 EXP can reference X, which is being modified. TOP_P is nonzero if this
5726 call is going to be used to determine whether we need a temporary
5727 for EXP, as opposed to a recursive call to this function.
5729 It is always safe for this routine to return zero since it merely
5730 searches for optimization opportunities. */
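/* Typical use (a sketch; compare expand_operands further down, which guards
   a suggested TARGET this way before reusing it for the first operand):

       if (target != 0 && ! safe_from_p (target, exp1, 1))
         target = 0;

   That is, if EXP1 might reference TARGET, stop suggesting TARGET so that
   expanding EXP1 cannot clobber it.  Returning 0 is always conservative:
   callers only lose a reuse opportunity, never correctness.  */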
5733 safe_from_p (rtx x, tree exp, int top_p)
5735 rtx exp_rtl = 0;
5736 int i, nops;
5737 static tree save_expr_list;
5739 if (x == 0
5740 /* If EXP has varying size, we MUST use a target since we currently
5741 have no way of allocating temporaries of variable size
5742 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5743 So we assume here that something at a higher level has prevented a
5744 clash. This is somewhat bogus, but the best we can do. Only
5745 do this when X is BLKmode and when we are at the top level. */
5746 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5747 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5748 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5749 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5750 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5751 != INTEGER_CST)
5752 && GET_MODE (x) == BLKmode)
5753 /* If X is in the outgoing argument area, it is always safe. */
5754 || (GET_CODE (x) == MEM
5755 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5756 || (GET_CODE (XEXP (x, 0)) == PLUS
5757 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5758 return 1;
5760 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5761 find the underlying pseudo. */
5762 if (GET_CODE (x) == SUBREG)
5764 x = SUBREG_REG (x);
5765 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5766 return 0;
5769 /* A SAVE_EXPR might appear many times in the expression passed to the
5770 top-level safe_from_p call, and if it has a complex subexpression,
5771 examining it multiple times could result in a combinatorial explosion.
5772 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5773 with optimization took about 28 minutes to compile -- even though it was
5774 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5775 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5776 we have processed. Note that the only test of top_p was above. */
5778 if (top_p)
5780 int rtn;
5781 tree t;
5783 save_expr_list = 0;
5785 rtn = safe_from_p (x, exp, 0);
5787 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5788 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5790 return rtn;
5793 /* Now look at our tree code and possibly recurse. */
5794 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5796 case 'd':
5797 exp_rtl = DECL_RTL_IF_SET (exp);
5798 break;
5800 case 'c':
5801 return 1;
5803 case 'x':
5804 if (TREE_CODE (exp) == TREE_LIST)
5806 while (1)
5808 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5809 return 0;
5810 exp = TREE_CHAIN (exp);
5811 if (!exp)
5812 return 1;
5813 if (TREE_CODE (exp) != TREE_LIST)
5814 return safe_from_p (x, exp, 0);
5817 else if (TREE_CODE (exp) == ERROR_MARK)
5818 return 1; /* An already-visited SAVE_EXPR? */
5819 else
5820 return 0;
5822 case '2':
5823 case '<':
5824 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5825 return 0;
5826 /* Fall through. */
5828 case '1':
5829 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5831 case 'e':
5832 case 'r':
5833 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5834 the expression. If it is set, we conflict iff we are that rtx or
5835 both are in memory. Otherwise, we check all operands of the
5836 expression recursively. */
5838 switch (TREE_CODE (exp))
5840 case ADDR_EXPR:
5841 /* If the operand is static or we are static, we can't conflict.
5842 Likewise if we don't conflict with the operand at all. */
5843 if (staticp (TREE_OPERAND (exp, 0))
5844 || TREE_STATIC (exp)
5845 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5846 return 1;
5848 /* Otherwise, the only way this can conflict is if we are taking
5849 the address of a DECL and that address is part of X, which is
5850 very rare. */
5851 exp = TREE_OPERAND (exp, 0);
5852 if (DECL_P (exp))
5854 if (!DECL_RTL_SET_P (exp)
5855 || GET_CODE (DECL_RTL (exp)) != MEM)
5856 return 0;
5857 else
5858 exp_rtl = XEXP (DECL_RTL (exp), 0);
5860 break;
5862 case INDIRECT_REF:
5863 if (GET_CODE (x) == MEM
5864 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5865 get_alias_set (exp)))
5866 return 0;
5867 break;
5869 case CALL_EXPR:
5870 /* Assume that the call will clobber all hard registers and
5871 all of memory. */
5872 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5873 || GET_CODE (x) == MEM)
5874 return 0;
5875 break;
5877 case RTL_EXPR:
5878 /* If a sequence exists, we would have to scan every instruction
5879 in the sequence to see if it was safe. This is probably not
5880 worthwhile. */
5881 if (RTL_EXPR_SEQUENCE (exp))
5882 return 0;
5884 exp_rtl = RTL_EXPR_RTL (exp);
5885 break;
5887 case WITH_CLEANUP_EXPR:
5888 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5889 break;
5891 case CLEANUP_POINT_EXPR:
5892 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5894 case SAVE_EXPR:
5895 exp_rtl = SAVE_EXPR_RTL (exp);
5896 if (exp_rtl)
5897 break;
5899 /* If we've already scanned this, don't do it again. Otherwise,
5900 show we've scanned it and record for clearing the flag if we're
5901 going on. */
5902 if (TREE_PRIVATE (exp))
5903 return 1;
5905 TREE_PRIVATE (exp) = 1;
5906 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5908 TREE_PRIVATE (exp) = 0;
5909 return 0;
5912 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5913 return 1;
5915 case BIND_EXPR:
5916 /* The only operand we look at is operand 1. The rest aren't
5917 part of the expression. */
5918 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5920 default:
5921 break;
5924 /* If we have an rtx, we do not need to scan our operands. */
5925 if (exp_rtl)
5926 break;
5928 nops = first_rtl_op (TREE_CODE (exp));
5929 for (i = 0; i < nops; i++)
5930 if (TREE_OPERAND (exp, i) != 0
5931 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5932 return 0;
5934 /* If this is a language-specific tree code, it may require
5935 special handling. */
5936 if ((unsigned int) TREE_CODE (exp)
5937 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5938 && !(*lang_hooks.safe_from_p) (x, exp))
5939 return 0;
5942 /* If we have an rtl, find any enclosed object. Then see if we conflict
5943 with it. */
5944 if (exp_rtl)
5946 if (GET_CODE (exp_rtl) == SUBREG)
5948 exp_rtl = SUBREG_REG (exp_rtl);
5949 if (GET_CODE (exp_rtl) == REG
5950 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5951 return 0;
5954 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5955 are memory and they conflict. */
5956 return ! (rtx_equal_p (x, exp_rtl)
5957 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5958 && true_dependence (exp_rtl, VOIDmode, x,
5959 rtx_addr_varies_p)));
5962 /* If we reach here, it is safe. */
5963 return 1;
5966 /* Subroutine of expand_expr: return rtx if EXP is a
5967 variable or parameter; else return 0. */
5969 static rtx
5970 var_rtx (tree exp)
5972 STRIP_NOPS (exp);
5973 switch (TREE_CODE (exp))
5975 case PARM_DECL:
5976 case VAR_DECL:
5977 return DECL_RTL (exp);
5978 default:
5979 return 0;
5983 /* Return the highest power of two that EXP is known to be a multiple of.
5984 This is used in updating alignment of MEMs in array references. */
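/* Worked example (follows directly from the cases below): for an offset tree
   of the form i * 24 + 4, the MULT_EXPR yields 1 * 8 (a variable factors as
   1, and the lowest set bit of 24 is 8), and the PLUS_EXPR takes the minimum
   of 8 and 4, so the whole expression is known to be a multiple of 4.  */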
5986 static unsigned HOST_WIDE_INT
5987 highest_pow2_factor (tree exp)
5989 unsigned HOST_WIDE_INT c0, c1;
5991 switch (TREE_CODE (exp))
5993 case INTEGER_CST:
5994 /* We can find the lowest bit that's a one. If the low
5995 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5996 We need to handle this case since we can find it in a COND_EXPR,
5997 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5998 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5999 later ICE. */
6000 if (TREE_CONSTANT_OVERFLOW (exp))
6001 return BIGGEST_ALIGNMENT;
6002 else
6004 /* Note: tree_low_cst is intentionally not used here,
6005 since we don't care about the upper bits. */
6006 c0 = TREE_INT_CST_LOW (exp);
6007 c0 &= -c0;
6008 return c0 ? c0 : BIGGEST_ALIGNMENT;
6010 break;
6012 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6013 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6014 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6015 return MIN (c0, c1);
6017 case MULT_EXPR:
6018 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6019 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6020 return c0 * c1;
6022 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6023 case CEIL_DIV_EXPR:
6024 if (integer_pow2p (TREE_OPERAND (exp, 1))
6025 && host_integerp (TREE_OPERAND (exp, 1), 1))
6027 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6028 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6029 return MAX (1, c0 / c1);
6031 break;
6033 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6034 case SAVE_EXPR: case WITH_RECORD_EXPR:
6035 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6037 case COMPOUND_EXPR:
6038 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6040 case COND_EXPR:
6041 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6042 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6043 return MIN (c0, c1);
6045 default:
6046 break;
6049 return 1;
6052 /* Similar, except that it is known that the expression must be a multiple
6053 of the alignment of TYPE. */
6055 static unsigned HOST_WIDE_INT
6056 highest_pow2_factor_for_type (tree type, tree exp)
6058 unsigned HOST_WIDE_INT type_align, factor;
6060 factor = highest_pow2_factor (exp);
6061 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6062 return MAX (factor, type_align);
6065 /* Return an object on the placeholder list that matches EXP, a
6066 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6067 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6068 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6069 points to a location holding the starting point in the placeholder list
6070 (zero means start at the head of the list); on return, a pointer to the
6071 placeholder-list entry at which the object was found is stored there. */
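/* Usage sketch (mirroring the PLACEHOLDER_EXPR handling in
   get_inner_reference above and in expand_expr below):

       tree placeholder_ptr = 0;
       tree object = find_placeholder (exp, &placeholder_ptr);

   OBJECT is 0 if no enclosing WITH_RECORD_EXPR supplied a matching object;
   otherwise it is the expression to use in place of the PLACEHOLDER_EXPR,
   and PLACEHOLDER_PTR points at the placeholder_list entry where it was
   found, which lets the caller resume the search or trim the list.  */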
6073 tree
6074 find_placeholder (tree exp, tree *plist)
6076 tree type = TREE_TYPE (exp);
6077 tree placeholder_expr;
6079 for (placeholder_expr
6080 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6081 placeholder_expr != 0;
6082 placeholder_expr = TREE_CHAIN (placeholder_expr))
6084 tree need_type = TYPE_MAIN_VARIANT (type);
6085 tree elt;
6087 /* Find the outermost reference that is of the type we want. If none,
6088 see if any object has a type that is a pointer to the type we
6089 want. */
6090 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6091 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6092 || TREE_CODE (elt) == COND_EXPR)
6093 ? TREE_OPERAND (elt, 1)
6094 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6095 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6096 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6097 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6098 ? TREE_OPERAND (elt, 0) : 0))
6099 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6101 if (plist)
6102 *plist = placeholder_expr;
6103 return elt;
6106 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6108 = ((TREE_CODE (elt) == COMPOUND_EXPR
6109 || TREE_CODE (elt) == COND_EXPR)
6110 ? TREE_OPERAND (elt, 1)
6111 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6112 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6113 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6114 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6115 ? TREE_OPERAND (elt, 0) : 0))
6116 if (POINTER_TYPE_P (TREE_TYPE (elt))
6117 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6118 == need_type))
6120 if (plist)
6121 *plist = placeholder_expr;
6122 return build1 (INDIRECT_REF, need_type, elt);
6126 return 0;
6129 /* Subroutine of expand_expr. Expand the two operands of a binary
6130 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6131 The value may be stored in TARGET if TARGET is nonzero. The
6132 MODIFIER argument is as documented by expand_expr. */
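/* Call sketch (roughly how the binary-operator cases later in this file use
   it; OP0 and OP1 receive the expanded operands and SUBTARGET is only a
   hint):

       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                        subtarget, &op0, &op1, EXPAND_NORMAL);

   When the two operand trees are operand_equal_p, the second rtx is simply a
   copy_rtx of the first, so the shared subexpression is expanded only once.  */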
6134 static void
6135 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6136 enum expand_modifier modifier)
6138 if (! safe_from_p (target, exp1, 1))
6139 target = 0;
6140 if (operand_equal_p (exp0, exp1, 0))
6142 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6143 *op1 = copy_rtx (*op0);
6145 else
6147 /* If we need to preserve evaluation order, copy exp0 into its own
6148 temporary variable so that it can't be clobbered by exp1. */
6149 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6150 exp0 = save_expr (exp0);
6151 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6152 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6157 /* expand_expr: generate code for computing expression EXP.
6158 An rtx for the computed value is returned. The value is never null.
6159 In the case of a void EXP, const0_rtx is returned.
6161 The value may be stored in TARGET if TARGET is nonzero.
6162 TARGET is just a suggestion; callers must assume that
6163 the rtx returned may not be the same as TARGET.
6165 If TARGET is CONST0_RTX, it means that the value will be ignored.
6167 If TMODE is not VOIDmode, it suggests generating the
6168 result in mode TMODE. But this is done only when convenient.
6169 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6170 TMODE is just a suggestion; callers must assume that
6171 the rtx returned may not have mode TMODE.
6173 Note that TARGET may have neither TMODE nor MODE. In that case, it
6174 probably will not be used.
6176 If MODIFIER is EXPAND_SUM then when EXP is an addition
6177 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6178 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6179 products as above, or REG or MEM, or constant.
6180 Ordinarily in such cases we would output mul or add instructions
6181 and then return a pseudo reg containing the sum.
6183 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6184 it also marks a label as absolutely required (it can't be dead).
6185 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6186 This is used for outputting expressions used in initializers.
6188 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6189 with a constant address even if that address is not normally legitimate.
6190 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6192 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6193 a call parameter. Such targets require special care as we haven't yet
6194 marked TARGET so that it's safe from being trashed by libcalls. We
6195 don't want to use TARGET for anything but the final result;
6196 intermediate values must go elsewhere. Additionally, calls to
6197 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6199 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6200 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6201 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6202 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6203 recursively. */
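/* A minimal call sketch (hypothetical caller; in this tree expand_expr
   itself is a thin wrapper that calls expand_expr_real with a null ALT_RTL):

       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   Passing NULL_RTX and VOIDmode leaves both the location and the mode up to
   this routine; passing const0_rtx as TARGET instead asks only for the side
   effects of EXP, as the ignore handling below shows.  */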
6206 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6207 enum expand_modifier modifier, rtx *alt_rtl)
6209 rtx op0, op1, temp;
6210 tree type = TREE_TYPE (exp);
6211 int unsignedp = TREE_UNSIGNED (type);
6212 enum machine_mode mode;
6213 enum tree_code code = TREE_CODE (exp);
6214 optab this_optab;
6215 rtx subtarget, original_target;
6216 int ignore;
6217 tree context;
6219 /* Handle ERROR_MARK before anybody tries to access its type. */
6220 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6222 op0 = CONST0_RTX (tmode);
6223 if (op0 != 0)
6224 return op0;
6225 return const0_rtx;
6228 mode = TYPE_MODE (type);
6229 /* Use subtarget as the target for operand 0 of a binary operation. */
6230 subtarget = get_subtarget (target);
6231 original_target = target;
6232 ignore = (target == const0_rtx
6233 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6234 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6235 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6236 && TREE_CODE (type) == VOID_TYPE));
6238 /* If we are going to ignore this result, we need only do something
6239 if there is a side-effect somewhere in the expression. If there
6240 is, short-circuit the most common cases here. Note that we must
6241 not call expand_expr with anything but const0_rtx in case this
6242 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6244 if (ignore)
6246 if (! TREE_SIDE_EFFECTS (exp))
6247 return const0_rtx;
6249 /* Ensure we reference a volatile object even if value is ignored, but
6250 don't do this if all we are doing is taking its address. */
6251 if (TREE_THIS_VOLATILE (exp)
6252 && TREE_CODE (exp) != FUNCTION_DECL
6253 && mode != VOIDmode && mode != BLKmode
6254 && modifier != EXPAND_CONST_ADDRESS)
6256 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6257 if (GET_CODE (temp) == MEM)
6258 temp = copy_to_reg (temp);
6259 return const0_rtx;
6262 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6263 || code == INDIRECT_REF || code == BUFFER_REF)
6264 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6265 modifier);
6267 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6268 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6270 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6271 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6272 return const0_rtx;
6274 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6275 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6276 /* If the second operand has no side effects, just evaluate
6277 the first. */
6278 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6279 modifier);
6280 else if (code == BIT_FIELD_REF)
6282 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6283 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6284 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6285 return const0_rtx;
6288 target = 0;
6291 /* If we will do cse, generate all results into pseudo registers
6292 since 1) that allows cse to find more things
6293 and 2) otherwise cse could produce an insn the machine
6294 cannot support. An exception is a CONSTRUCTOR into a multi-word
6295 MEM: that's much more likely to be most efficient into the MEM.
6296 Another is a CALL_EXPR which must return in memory. */
6298 if (! cse_not_expected && mode != BLKmode && target
6299 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6300 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6301 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6302 target = 0;
6304 switch (code)
6306 case LABEL_DECL:
6308 tree function = decl_function_context (exp);
6309 /* Labels in containing functions, or labels used from initializers,
6310 must be forced. */
6311 if (modifier == EXPAND_INITIALIZER
6312 || (function != current_function_decl
6313 && function != inline_function_decl
6314 && function != 0))
6315 temp = force_label_rtx (exp);
6316 else
6317 temp = label_rtx (exp);
6319 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6320 if (function != current_function_decl
6321 && function != inline_function_decl && function != 0)
6322 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6323 return temp;
6326 case PARM_DECL:
6327 if (!DECL_RTL_SET_P (exp))
6329 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6330 return CONST0_RTX (mode);
6333 /* ... fall through ... */
6335 case VAR_DECL:
6336 /* If a static var's type was incomplete when the decl was written,
6337 but the type is complete now, lay out the decl now. */
6338 if (DECL_SIZE (exp) == 0
6339 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6340 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6341 layout_decl (exp, 0);
6343 /* ... fall through ... */
6345 case FUNCTION_DECL:
6346 case RESULT_DECL:
6347 if (DECL_RTL (exp) == 0)
6348 abort ();
6350 /* Ensure the variable is marked as used even if it doesn't go through
6351 a parser. If it hasn't been used yet, write out an external
6352 definition. */
6353 if (! TREE_USED (exp))
6355 assemble_external (exp);
6356 TREE_USED (exp) = 1;
6359 /* Show we haven't gotten RTL for this yet. */
6360 temp = 0;
6362 /* Handle variables inherited from containing functions. */
6363 context = decl_function_context (exp);
6365 /* We treat inline_function_decl as an alias for the current function
6366 because that is the inline function whose vars, types, etc.
6367 are being merged into the current function.
6368 See expand_inline_function. */
6370 if (context != 0 && context != current_function_decl
6371 && context != inline_function_decl
6372 /* If var is static, we don't need a static chain to access it. */
6373 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6374 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6376 rtx addr;
6378 /* Mark as non-local and addressable. */
6379 DECL_NONLOCAL (exp) = 1;
6380 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6381 abort ();
6382 (*lang_hooks.mark_addressable) (exp);
6383 if (GET_CODE (DECL_RTL (exp)) != MEM)
6384 abort ();
6385 addr = XEXP (DECL_RTL (exp), 0);
6386 if (GET_CODE (addr) == MEM)
6387 addr
6388 = replace_equiv_address (addr,
6389 fix_lexical_addr (XEXP (addr, 0), exp));
6390 else
6391 addr = fix_lexical_addr (addr, exp);
6393 temp = replace_equiv_address (DECL_RTL (exp), addr);
6396 /* This is the case of an array whose size is to be determined
6397 from its initializer, while the initializer is still being parsed.
6398 See expand_decl. */
6400 else if (GET_CODE (DECL_RTL (exp)) == MEM
6401 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6402 temp = validize_mem (DECL_RTL (exp));
6404 /* If DECL_RTL is memory, we are in the normal case: if either
6405 the address is not valid, or it is not a register and -fforce-addr
6406 is specified, get the address into a register. */
6408 else if (GET_CODE (DECL_RTL (exp)) == MEM
6409 && modifier != EXPAND_CONST_ADDRESS
6410 && modifier != EXPAND_SUM
6411 && modifier != EXPAND_INITIALIZER
6412 && (! memory_address_p (DECL_MODE (exp),
6413 XEXP (DECL_RTL (exp), 0))
6414 || (flag_force_addr
6415 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6417 if (alt_rtl)
6418 *alt_rtl = DECL_RTL (exp);
6419 temp = replace_equiv_address (DECL_RTL (exp),
6420 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6423 /* If we got something, return it. But first, set the alignment
6424 if the address is a register. */
6425 if (temp != 0)
6427 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6428 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6430 return temp;
6433 /* If the mode of DECL_RTL does not match that of the decl, it
6434 must be a promoted value. We return a SUBREG of the wanted mode,
6435 but mark it so that we know that it was already extended. */
6437 if (GET_CODE (DECL_RTL (exp)) == REG
6438 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6440 /* Get the signedness used for this variable. Ensure we get the
6441 same mode we got when the variable was declared. */
6442 if (GET_MODE (DECL_RTL (exp))
6443 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6444 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6445 abort ();
6447 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6448 SUBREG_PROMOTED_VAR_P (temp) = 1;
6449 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6450 return temp;
6453 return DECL_RTL (exp);
6455 case INTEGER_CST:
6456 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6457 TREE_INT_CST_HIGH (exp), mode);
6459 /* ??? If overflow is set, fold will have done an incomplete job,
6460 which can result in (plus xx (const_int 0)), which can get
6461 simplified by validate_replace_rtx during virtual register
6462 instantiation, which can result in unrecognizable insns.
6463 Avoid this by forcing all overflows into registers. */
6464 if (TREE_CONSTANT_OVERFLOW (exp)
6465 && modifier != EXPAND_INITIALIZER)
6466 temp = force_reg (mode, temp);
6468 return temp;
6470 case VECTOR_CST:
6471 return const_vector_from_tree (exp);
6473 case CONST_DECL:
6474 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6476 case REAL_CST:
6477 /* If optimized, generate immediate CONST_DOUBLE
6478 which will be turned into memory by reload if necessary.
6480 We used to force a register so that loop.c could see it. But
6481 this does not allow gen_* patterns to perform optimizations with
6482 the constants. It also produces two insns in cases like "x = 1.0;".
6483 On most machines, floating-point constants are not permitted in
6484 many insns, so we'd end up copying it to a register in any case.
6486 Now, we do the copying in expand_binop, if appropriate. */
6487 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6488 TYPE_MODE (TREE_TYPE (exp)));
6490 case COMPLEX_CST:
6491 /* Handle evaluating a complex constant in a CONCAT target. */
6492 if (original_target && GET_CODE (original_target) == CONCAT)
6494 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6495 rtx rtarg, itarg;
6497 rtarg = XEXP (original_target, 0);
6498 itarg = XEXP (original_target, 1);
6500 /* Move the real and imaginary parts separately. */
6501 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6502 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6504 if (op0 != rtarg)
6505 emit_move_insn (rtarg, op0);
6506 if (op1 != itarg)
6507 emit_move_insn (itarg, op1);
6509 return original_target;
6512 /* ... fall through ... */
6514 case STRING_CST:
6515 temp = output_constant_def (exp, 1);
6517 /* temp contains a constant address.
6518 On RISC machines where a constant address isn't valid,
6519 make some insns to get that address into a register. */
6520 if (modifier != EXPAND_CONST_ADDRESS
6521 && modifier != EXPAND_INITIALIZER
6522 && modifier != EXPAND_SUM
6523 && (! memory_address_p (mode, XEXP (temp, 0))
6524 || flag_force_addr))
6525 return replace_equiv_address (temp,
6526 copy_rtx (XEXP (temp, 0)));
6527 return temp;
6529 case EXPR_WITH_FILE_LOCATION:
6531 rtx to_return;
6532 struct file_stack fs;
6534 fs.location = input_location;
6535 fs.next = expr_wfl_stack;
6536 input_filename = EXPR_WFL_FILENAME (exp);
6537 input_line = EXPR_WFL_LINENO (exp);
6538 expr_wfl_stack = &fs;
6539 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6540 emit_line_note (input_location);
6541 /* Possibly avoid switching back and forth here. */
6542 to_return = expand_expr (EXPR_WFL_NODE (exp),
6543 (ignore ? const0_rtx : target),
6544 tmode, modifier);
6545 if (expr_wfl_stack != &fs)
6546 abort ();
6547 input_location = fs.location;
6548 expr_wfl_stack = fs.next;
6549 return to_return;
6552 case SAVE_EXPR:
6553 context = decl_function_context (exp);
6555 /* If this SAVE_EXPR was at global context, assume we are an
6556 initialization function and move it into our context. */
6557 if (context == 0)
6558 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6560 /* We treat inline_function_decl as an alias for the current function
6561 because that is the inline function whose vars, types, etc.
6562 are being merged into the current function.
6563 See expand_inline_function. */
6564 if (context == current_function_decl || context == inline_function_decl)
6565 context = 0;
6567 /* If this is non-local, handle it. */
6568 if (context)
6570 /* The following call just exists to abort if the context is
6571 not of a containing function. */
6572 find_function_data (context);
6574 temp = SAVE_EXPR_RTL (exp);
6575 if (temp && GET_CODE (temp) == REG)
6577 put_var_into_stack (exp, /*rescan=*/true);
6578 temp = SAVE_EXPR_RTL (exp);
6580 if (temp == 0 || GET_CODE (temp) != MEM)
6581 abort ();
6582 return
6583 replace_equiv_address (temp,
6584 fix_lexical_addr (XEXP (temp, 0), exp));
6586 if (SAVE_EXPR_RTL (exp) == 0)
6588 if (mode == VOIDmode)
6589 temp = const0_rtx;
6590 else
6591 temp = assign_temp (build_qualified_type (type,
6592 (TYPE_QUALS (type)
6593 | TYPE_QUAL_CONST)),
6594 3, 0, 0);
6596 SAVE_EXPR_RTL (exp) = temp;
6597 if (!optimize && GET_CODE (temp) == REG)
6598 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6599 save_expr_regs);
6601 /* If the mode of TEMP does not match that of the expression, it
6602 must be a promoted value. We pass store_expr a SUBREG of the
6603 wanted mode but mark it so that we know that it was already
6604 extended. */
6606 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6608 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6609 promote_mode (type, mode, &unsignedp, 0);
6610 SUBREG_PROMOTED_VAR_P (temp) = 1;
6611 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6614 if (temp == const0_rtx)
6615 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6616 else
6617 store_expr (TREE_OPERAND (exp, 0), temp,
6618 modifier == EXPAND_STACK_PARM ? 2 : 0);
6620 TREE_USED (exp) = 1;
6623 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6624 must be a promoted value. We return a SUBREG of the wanted mode,
6625 but mark it so that we know that it was already extended. */
6627 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6628 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6630 /* Compute the signedness and make the proper SUBREG. */
6631 promote_mode (type, mode, &unsignedp, 0);
6632 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6633 SUBREG_PROMOTED_VAR_P (temp) = 1;
6634 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6635 return temp;
6638 return SAVE_EXPR_RTL (exp);
6640 case UNSAVE_EXPR:
6642 rtx temp;
6643 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6644 TREE_OPERAND (exp, 0)
6645 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6646 return temp;
6649 case PLACEHOLDER_EXPR:
6651 tree old_list = placeholder_list;
6652 tree placeholder_expr = 0;
6654 exp = find_placeholder (exp, &placeholder_expr);
6655 if (exp == 0)
6656 abort ();
6658 placeholder_list = TREE_CHAIN (placeholder_expr);
6659 temp = expand_expr (exp, original_target, tmode, modifier);
6660 placeholder_list = old_list;
6661 return temp;
6664 case WITH_RECORD_EXPR:
6665 /* Put the object on the placeholder list, expand our first operand,
6666 and pop the list. */
6667 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6668 placeholder_list);
6669 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6670 modifier);
6671 placeholder_list = TREE_CHAIN (placeholder_list);
6672 return target;
6674 case GOTO_EXPR:
6675 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6676 expand_goto (TREE_OPERAND (exp, 0));
6677 else
6678 expand_computed_goto (TREE_OPERAND (exp, 0));
6679 return const0_rtx;
6681 case EXIT_EXPR:
6682 expand_exit_loop_if_false (NULL,
6683 invert_truthvalue (TREE_OPERAND (exp, 0)));
6684 return const0_rtx;
6686 case LABELED_BLOCK_EXPR:
6687 if (LABELED_BLOCK_BODY (exp))
6688 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6689 /* Should perhaps use expand_label, but this is simpler and safer. */
6690 do_pending_stack_adjust ();
6691 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6692 return const0_rtx;
6694 case EXIT_BLOCK_EXPR:
6695 if (EXIT_BLOCK_RETURN (exp))
6696 sorry ("returned value in block_exit_expr");
6697 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6698 return const0_rtx;
6700 case LOOP_EXPR:
6701 push_temp_slots ();
6702 expand_start_loop (1);
6703 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6704 expand_end_loop ();
6705 pop_temp_slots ();
6707 return const0_rtx;
6709 case BIND_EXPR:
6711 tree vars = TREE_OPERAND (exp, 0);
6713 /* Need to open a binding contour here because
6714 if there are any cleanups they must be contained here. */
6715 expand_start_bindings (2);
6717 /* Mark the corresponding BLOCK for output in its proper place. */
6718 if (TREE_OPERAND (exp, 2) != 0
6719 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6720 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6722 /* If VARS have not yet been expanded, expand them now. */
6723 while (vars)
6725 if (!DECL_RTL_SET_P (vars))
6726 expand_decl (vars);
6727 expand_decl_init (vars);
6728 vars = TREE_CHAIN (vars);
6731 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6733 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6735 return temp;
6738 case RTL_EXPR:
6739 if (RTL_EXPR_SEQUENCE (exp))
6741 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6742 abort ();
6743 emit_insn (RTL_EXPR_SEQUENCE (exp));
6744 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6746 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6747 free_temps_for_rtl_expr (exp);
6748 if (alt_rtl)
6749 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6750 return RTL_EXPR_RTL (exp);
6752 case CONSTRUCTOR:
6753 /* If we don't need the result, just ensure we evaluate any
6754 subexpressions. */
6755 if (ignore)
6757 tree elt;
6759 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6760 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6762 return const0_rtx;
6765 /* All elts simple constants => refer to a constant in memory. But
6766 if this is a non-BLKmode mode, let it store a field at a time
6767 since that should make a CONST_INT or CONST_DOUBLE when we
6768 fold. Likewise, if we have a target we can use, it is best to
6769 store directly into the target unless the type is large enough
6770 that memcpy will be used. If we are making an initializer and
6771 all operands are constant, put it in memory as well.
6773 FIXME: Avoid trying to fill vector constructors piece-meal.
6774 Output them with output_constant_def below unless we're sure
6775 they're zeros. This should go away when vector initializers
6776 are treated like VECTOR_CST instead of arrays.
6778 else if ((TREE_STATIC (exp)
6779 && ((mode == BLKmode
6780 && ! (target != 0 && safe_from_p (target, exp, 1)))
6781 || TREE_ADDRESSABLE (exp)
6782 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6783 && (! MOVE_BY_PIECES_P
6784 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6785 TYPE_ALIGN (type)))
6786 && ((TREE_CODE (type) == VECTOR_TYPE
6787 && !is_zeros_p (exp))
6788 || ! mostly_zeros_p (exp)))))
6789 || ((modifier == EXPAND_INITIALIZER
6790 || modifier == EXPAND_CONST_ADDRESS)
6791 && TREE_CONSTANT (exp)))
6793 rtx constructor = output_constant_def (exp, 1);
6795 if (modifier != EXPAND_CONST_ADDRESS
6796 && modifier != EXPAND_INITIALIZER
6797 && modifier != EXPAND_SUM)
6798 constructor = validize_mem (constructor);
6800 return constructor;
6802 else
6804 /* Handle calls that pass values in multiple non-contiguous
6805 locations. The Irix 6 ABI has examples of this. */
6806 if (target == 0 || ! safe_from_p (target, exp, 1)
6807 || GET_CODE (target) == PARALLEL
6808 || modifier == EXPAND_STACK_PARM)
6809 target
6810 = assign_temp (build_qualified_type (type,
6811 (TYPE_QUALS (type)
6812 | (TREE_READONLY (exp)
6813 * TYPE_QUAL_CONST))),
6814 0, TREE_ADDRESSABLE (exp), 1);
6816 store_constructor (exp, target, 0, int_expr_size (exp));
6817 return target;
6820 case INDIRECT_REF:
6822 tree exp1 = TREE_OPERAND (exp, 0);
6823 tree index;
6824 tree string = string_constant (exp1, &index);
6826 /* Try to optimize reads from const strings. */
6827 if (string
6828 && TREE_CODE (string) == STRING_CST
6829 && TREE_CODE (index) == INTEGER_CST
6830 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6831 && GET_MODE_CLASS (mode) == MODE_INT
6832 && GET_MODE_SIZE (mode) == 1
6833 && modifier != EXPAND_WRITE)
6834 return gen_int_mode (TREE_STRING_POINTER (string)
6835 [TREE_INT_CST_LOW (index)], mode);
6837 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6838 op0 = memory_address (mode, op0);
6839 temp = gen_rtx_MEM (mode, op0);
6840 set_mem_attributes (temp, exp, 0);
6842 /* If we are writing to this object and its type is a record with
6843 readonly fields, we must mark it as readonly so it will
6844 conflict with readonly references to those fields. */
6845 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6846 RTX_UNCHANGING_P (temp) = 1;
6848 return temp;
6851 case ARRAY_REF:
6852 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6853 abort ();
6856 tree array = TREE_OPERAND (exp, 0);
6857 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6858 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6859 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6860 HOST_WIDE_INT i;
6862 /* Optimize the special-case of a zero lower bound.
6864 We convert the low_bound to sizetype to avoid some problems
6865 with constant folding. (E.g. suppose the lower bound is 1,
6866 and its mode is QI. Without the conversion, (ARRAY
6867 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6868 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6870 if (! integer_zerop (low_bound))
6871 index = size_diffop (index, convert (sizetype, low_bound));
6873 /* Fold an expression like: "foo"[2].
6874 This is not done in fold so it won't happen inside &.
6875 Don't fold if this is for wide characters since it's too
6876 difficult to do correctly and this is a very rare case. */
6878 if (modifier != EXPAND_CONST_ADDRESS
6879 && modifier != EXPAND_INITIALIZER
6880 && modifier != EXPAND_MEMORY
6881 && TREE_CODE (array) == STRING_CST
6882 && TREE_CODE (index) == INTEGER_CST
6883 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6884 && GET_MODE_CLASS (mode) == MODE_INT
6885 && GET_MODE_SIZE (mode) == 1)
6886 return gen_int_mode (TREE_STRING_POINTER (array)
6887 [TREE_INT_CST_LOW (index)], mode);
6889 /* If this is a constant index into a constant array,
6890 just get the value from the array. Handle both the cases when
6891 we have an explicit constructor and when our operand is a variable
6892 that was declared const. */
6894 if (modifier != EXPAND_CONST_ADDRESS
6895 && modifier != EXPAND_INITIALIZER
6896 && modifier != EXPAND_MEMORY
6897 && TREE_CODE (array) == CONSTRUCTOR
6898 && ! TREE_SIDE_EFFECTS (array)
6899 && TREE_CODE (index) == INTEGER_CST
6900 && 0 > compare_tree_int (index,
6901 list_length (CONSTRUCTOR_ELTS
6902 (TREE_OPERAND (exp, 0)))))
6904 tree elem;
6906 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6907 i = TREE_INT_CST_LOW (index);
6908 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6911 if (elem)
6912 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6913 modifier);
6916 else if (optimize >= 1
6917 && modifier != EXPAND_CONST_ADDRESS
6918 && modifier != EXPAND_INITIALIZER
6919 && modifier != EXPAND_MEMORY
6920 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6921 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6922 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6923 && targetm.binds_local_p (array))
6925 if (TREE_CODE (index) == INTEGER_CST)
6927 tree init = DECL_INITIAL (array);
6929 if (TREE_CODE (init) == CONSTRUCTOR)
6931 tree elem;
6933 for (elem = CONSTRUCTOR_ELTS (init);
6934 (elem
6935 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6936 elem = TREE_CHAIN (elem))
6939 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6940 return expand_expr (fold (TREE_VALUE (elem)), target,
6941 tmode, modifier);
6943 else if (TREE_CODE (init) == STRING_CST
6944 && 0 > compare_tree_int (index,
6945 TREE_STRING_LENGTH (init)))
6947 tree type = TREE_TYPE (TREE_TYPE (init));
6948 enum machine_mode mode = TYPE_MODE (type);
6950 if (GET_MODE_CLASS (mode) == MODE_INT
6951 && GET_MODE_SIZE (mode) == 1)
6952 return gen_int_mode (TREE_STRING_POINTER (init)
6953 [TREE_INT_CST_LOW (index)], mode);
6958 goto normal_inner_ref;
6960 case COMPONENT_REF:
6961 /* If the operand is a CONSTRUCTOR, we can just extract the
6962 appropriate field if it is present. */
6963 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6965 tree elt;
6967 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6968 elt = TREE_CHAIN (elt))
6969 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6970 /* We can normally use the value of the field in the
6971 CONSTRUCTOR. However, if this is a bitfield in
6972 an integral mode that we can fit in a HOST_WIDE_INT,
6973 we must mask only the number of bits in the bitfield,
6974 since this is done implicitly by the constructor. If
6975 the bitfield does not meet either of those conditions,
6976 we can't do this optimization. */
6977 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6978 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6979 == MODE_INT)
6980 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6981 <= HOST_BITS_PER_WIDE_INT))))
6983 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6984 && modifier == EXPAND_STACK_PARM)
6985 target = 0;
6986 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6987 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6989 HOST_WIDE_INT bitsize
6990 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6991 enum machine_mode imode
6992 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6994 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6996 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6997 op0 = expand_and (imode, op0, op1, target);
6999 else
7001 tree count
7002 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7005 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7006 target, 0);
7007 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7008 target, 0);
7012 return op0;
7015 goto normal_inner_ref;
7017 case BIT_FIELD_REF:
7018 case ARRAY_RANGE_REF:
7019 normal_inner_ref:
7021 enum machine_mode mode1;
7022 HOST_WIDE_INT bitsize, bitpos;
7023 tree offset;
7024 int volatilep = 0;
7025 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7026 &mode1, &unsignedp, &volatilep);
7027 rtx orig_op0;
7029 /* If we got back the original object, something is wrong. Perhaps
7030 we are evaluating an expression too early. In any event, don't
7031 infinitely recurse. */
7032 if (tem == exp)
7033 abort ();
7035 /* If TEM's type is a union of variable size, pass TARGET to the inner
7036 computation, since it will need a temporary and TARGET is known
7037 to be usable for it. This occurs in unchecked conversion in Ada. */
7039 orig_op0 = op0
7040 = expand_expr (tem,
7041 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7042 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7043 != INTEGER_CST)
7044 && modifier != EXPAND_STACK_PARM
7045 ? target : NULL_RTX),
7046 VOIDmode,
7047 (modifier == EXPAND_INITIALIZER
7048 || modifier == EXPAND_CONST_ADDRESS
7049 || modifier == EXPAND_STACK_PARM)
7050 ? modifier : EXPAND_NORMAL);
7052 /* If this is a constant, put it into a register if it is a
7053 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7054 if (CONSTANT_P (op0))
7056 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7057 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7058 && offset == 0)
7059 op0 = force_reg (mode, op0);
7060 else
7061 op0 = validize_mem (force_const_mem (mode, op0));
7064 /* Otherwise, if this object is not in memory and we either have an
7065 offset or a BLKmode result, put it there. This case can't occur in
7066 C, but can in Ada if we have unchecked conversion of an expression
7067 from a scalar type to an array or record type or for an
7068 ARRAY_RANGE_REF whose type is BLKmode. */
7069 else if (GET_CODE (op0) != MEM
7070 && (offset != 0
7071 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7073 /* If the operand is a SAVE_EXPR, we can deal with this by
7074 forcing the SAVE_EXPR into memory. */
7075 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7077 put_var_into_stack (TREE_OPERAND (exp, 0),
7078 /*rescan=*/true);
7079 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7081 else
7083 tree nt
7084 = build_qualified_type (TREE_TYPE (tem),
7085 (TYPE_QUALS (TREE_TYPE (tem))
7086 | TYPE_QUAL_CONST));
7087 rtx memloc = assign_temp (nt, 1, 1, 1);
7089 emit_move_insn (memloc, op0);
7090 op0 = memloc;
7094 if (offset != 0)
7096 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7097 EXPAND_SUM);
7099 if (GET_CODE (op0) != MEM)
7100 abort ();
7102 #ifdef POINTERS_EXTEND_UNSIGNED
7103 if (GET_MODE (offset_rtx) != Pmode)
7104 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7105 #else
7106 if (GET_MODE (offset_rtx) != ptr_mode)
7107 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7108 #endif
7110 if (GET_MODE (op0) == BLKmode
7111 /* A constant address in OP0 can have VOIDmode, we must
7112 not try to call force_reg in that case. */
7113 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7114 && bitsize != 0
7115 && (bitpos % bitsize) == 0
7116 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7117 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7119 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7120 bitpos = 0;
7123 op0 = offset_address (op0, offset_rtx,
7124 highest_pow2_factor (offset));
7127 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7128 record its alignment as BIGGEST_ALIGNMENT. */
7129 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7130 && is_aligning_offset (offset, tem))
7131 set_mem_align (op0, BIGGEST_ALIGNMENT);
7133 /* Don't forget about volatility even if this is a bitfield. */
7134 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7136 if (op0 == orig_op0)
7137 op0 = copy_rtx (op0);
7139 MEM_VOLATILE_P (op0) = 1;
7142 /* The following code doesn't handle CONCAT.
7143 Assume only bitpos == 0 can be used for CONCAT, due to
7144 one-element arrays having the same mode as their element. */
7145 if (GET_CODE (op0) == CONCAT)
7147 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7148 abort ();
7149 return op0;
7152 /* In cases where an aligned union has an unaligned object
7153 as a field, we might be extracting a BLKmode value from
7154 an integer-mode (e.g., SImode) object. Handle this case
7155 by doing the extract into an object as wide as the field
7156 (which we know to be the width of a basic mode), then
7157 storing into memory, and changing the mode to BLKmode. */
7158 if (mode1 == VOIDmode
7159 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7160 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7161 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7162 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7163 && modifier != EXPAND_CONST_ADDRESS
7164 && modifier != EXPAND_INITIALIZER)
7165 /* If the field isn't aligned enough to fetch as a memref,
7166 fetch it as a bit field. */
7167 || (mode1 != BLKmode
7168 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7169 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7170 || (GET_CODE (op0) == MEM
7171 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7172 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7173 && ((modifier == EXPAND_CONST_ADDRESS
7174 || modifier == EXPAND_INITIALIZER)
7175 ? STRICT_ALIGNMENT
7176 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7177 || (bitpos % BITS_PER_UNIT != 0)))
7178 /* If the type and the field are a constant size and the
7179 size of the type isn't the same size as the bitfield,
7180 we must use bitfield operations. */
7181 || (bitsize >= 0
7182 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7183 == INTEGER_CST)
7184 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7185 bitsize)))
7187 enum machine_mode ext_mode = mode;
7189 if (ext_mode == BLKmode
7190 && ! (target != 0 && GET_CODE (op0) == MEM
7191 && GET_CODE (target) == MEM
7192 && bitpos % BITS_PER_UNIT == 0))
7193 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7195 if (ext_mode == BLKmode)
7197 if (target == 0)
7198 target = assign_temp (type, 0, 1, 1);
7200 if (bitsize == 0)
7201 return target;
7203 /* In this case, BITPOS must start at a byte boundary and
7204 TARGET, if specified, must be a MEM. */
7205 if (GET_CODE (op0) != MEM
7206 || (target != 0 && GET_CODE (target) != MEM)
7207 || bitpos % BITS_PER_UNIT != 0)
7208 abort ();
7210 emit_block_move (target,
7211 adjust_address (op0, VOIDmode,
7212 bitpos / BITS_PER_UNIT),
7213 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7214 / BITS_PER_UNIT),
7215 (modifier == EXPAND_STACK_PARM
7216 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7218 return target;
7221 op0 = validize_mem (op0);
7223 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7224 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7226 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7227 (modifier == EXPAND_STACK_PARM
7228 ? NULL_RTX : target),
7229 ext_mode, ext_mode,
7230 int_size_in_bytes (TREE_TYPE (tem)));
7232 /* If the result is a record type and BITSIZE is narrower than
7233 the mode of OP0, an integral mode, and this is a big endian
7234 machine, we must put the field into the high-order bits. */
7235 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7236 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7237 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7238 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7239 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7240 - bitsize),
7241 op0, 1);
7243 /* If the result type is BLKmode, store the data into a temporary
7244 of the appropriate type, but with the mode corresponding to the
7245 mode for the data we have (op0's mode). It's tempting to make
7246 this a constant type, since we know it's only being stored once,
7247 but that can cause problems if we are taking the address of this
7248 COMPONENT_REF because the MEM of any reference via that address
7249 will have flags corresponding to the type, which will not
7250 necessarily be constant. */
7251 if (mode == BLKmode)
7253 rtx new
7254 = assign_stack_temp_for_type
7255 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7257 emit_move_insn (new, op0);
7258 op0 = copy_rtx (new);
7259 PUT_MODE (op0, BLKmode);
7260 set_mem_attributes (op0, exp, 1);
7263 return op0;
7266 /* If the result is BLKmode, use that to access the object
7267 now as well. */
7268 if (mode == BLKmode)
7269 mode1 = BLKmode;
7271 /* Get a reference to just this component. */
7272 if (modifier == EXPAND_CONST_ADDRESS
7273 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7274 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7275 else
7276 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7278 if (op0 == orig_op0)
7279 op0 = copy_rtx (op0);
7281 set_mem_attributes (op0, exp, 0);
7282 if (GET_CODE (XEXP (op0, 0)) == REG)
7283 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7285 MEM_VOLATILE_P (op0) |= volatilep;
7286 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7287 || modifier == EXPAND_CONST_ADDRESS
7288 || modifier == EXPAND_INITIALIZER)
7289 return op0;
7290 else if (target == 0)
7291 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7293 convert_move (target, op0, unsignedp);
7294 return target;
7297 case VTABLE_REF:
7299 rtx insn, before = get_last_insn (), vtbl_ref;
7301 /* Evaluate the interior expression. */
7302 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7303 tmode, modifier);
7305 /* Get or create an instruction off which to hang a note. */
7306 if (REG_P (subtarget))
7308 target = subtarget;
7309 insn = get_last_insn ();
7310 if (insn == before)
7311 abort ();
7312 if (! INSN_P (insn))
7313 insn = prev_nonnote_insn (insn);
7315 else
7317 target = gen_reg_rtx (GET_MODE (subtarget));
7318 insn = emit_move_insn (target, subtarget);
7321 /* Collect the data for the note. */
7322 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7323 vtbl_ref = plus_constant (vtbl_ref,
7324 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7325 /* Discard the initial CONST that was added. */
7326 vtbl_ref = XEXP (vtbl_ref, 0);
7328 REG_NOTES (insn)
7329 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7331 return target;
7334 /* Intended for a reference to a buffer of a file-object in Pascal.
7335 But it's not certain that a special tree code will really be
7336 necessary for these. INDIRECT_REF might work for them. */
7337 case BUFFER_REF:
7338 abort ();
7340 case IN_EXPR:
7342 /* Pascal set IN expression.
7344 Algorithm:
7345 rlo = set_low - (set_low%bits_per_word);
7346 the_word = set [ (index - rlo)/bits_per_word ];
7347 bit_index = index % bits_per_word;
7348 bitmask = 1 << bit_index;
7349 return !!(the_word & bitmask); */
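/* A minimal C sketch of the same membership test, assuming the set is
   stored as an array of bytes (the code below works byte-wise, so
   bits_per_word above corresponds to BITS_PER_UNIT; the helper name is
   made up):

     static int in_set (const unsigned char *set, int set_low, int index)
     {
       int rlo = set_low - (set_low % 8);
       unsigned char the_word = set[(index - rlo) / 8];
       return (the_word >> (index % 8)) & 1;
     }

   plus the range checks on INDEX that are emitted first.  */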
7351 tree set = TREE_OPERAND (exp, 0);
7352 tree index = TREE_OPERAND (exp, 1);
7353 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7354 tree set_type = TREE_TYPE (set);
7355 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7356 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7357 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7358 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7359 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7360 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7361 rtx setaddr = XEXP (setval, 0);
7362 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7363 rtx rlow;
7364 rtx diff, quo, rem, addr, bit, result;
7366 /* If domain is empty, answer is no. Likewise if index is constant
7367 and out of bounds. */
7368 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7369 && TREE_CODE (set_low_bound) == INTEGER_CST
7370 && tree_int_cst_lt (set_high_bound, set_low_bound))
7371 || (TREE_CODE (index) == INTEGER_CST
7372 && TREE_CODE (set_low_bound) == INTEGER_CST
7373 && tree_int_cst_lt (index, set_low_bound))
7374 || (TREE_CODE (set_high_bound) == INTEGER_CST
7375 && TREE_CODE (index) == INTEGER_CST
7376 && tree_int_cst_lt (set_high_bound, index))))
7377 return const0_rtx;
7379 if (target == 0)
7380 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7382 /* If we get here, we have to generate the code for both cases
7383 (in range and out of range). */
7385 op0 = gen_label_rtx ();
7386 op1 = gen_label_rtx ();
7388 if (! (GET_CODE (index_val) == CONST_INT
7389 && GET_CODE (lo_r) == CONST_INT))
7390 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7391 GET_MODE (index_val), iunsignedp, op1);
7393 if (! (GET_CODE (index_val) == CONST_INT
7394 && GET_CODE (hi_r) == CONST_INT))
7395 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7396 GET_MODE (index_val), iunsignedp, op1);
7398 /* Calculate the element number of bit zero in the first word
7399 of the set. */
7400 if (GET_CODE (lo_r) == CONST_INT)
7401 rlow = GEN_INT (INTVAL (lo_r)
7402 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7403 else
7404 rlow = expand_binop (index_mode, and_optab, lo_r,
7405 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7406 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7408 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7409 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7411 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7412 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7413 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7414 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7416 addr = memory_address (byte_mode,
7417 expand_binop (index_mode, add_optab, diff,
7418 setaddr, NULL_RTX, iunsignedp,
7419 OPTAB_LIB_WIDEN));
7421 /* Extract the bit we want to examine. */
7422 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7423 gen_rtx_MEM (byte_mode, addr),
7424 make_tree (TREE_TYPE (index), rem),
7425 NULL_RTX, 1);
7426 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7427 GET_MODE (target) == byte_mode ? target : 0,
7428 1, OPTAB_LIB_WIDEN);
7430 if (result != target)
7431 convert_move (target, result, 1);
7433 /* Output the code to handle the out-of-range case. */
7434 emit_jump (op0);
7435 emit_label (op1);
7436 emit_move_insn (target, const0_rtx);
7437 emit_label (op0);
7438 return target;
7441 case WITH_CLEANUP_EXPR:
7442 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7444 WITH_CLEANUP_EXPR_RTL (exp)
7445 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7446 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7447 CLEANUP_EH_ONLY (exp));
7449 /* That's it for this cleanup. */
7450 TREE_OPERAND (exp, 1) = 0;
7452 return WITH_CLEANUP_EXPR_RTL (exp);
7454 case CLEANUP_POINT_EXPR:
7456 /* Start a new binding layer that will keep track of all cleanup
7457 actions to be performed. */
7458 expand_start_bindings (2);
7460 target_temp_slot_level = temp_slot_level;
7462 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7463 /* If we're going to use this value, load it up now. */
7464 if (! ignore)
7465 op0 = force_not_mem (op0);
7466 preserve_temp_slots (op0);
7467 expand_end_bindings (NULL_TREE, 0, 0);
7469 return op0;
7471 case CALL_EXPR:
7472 /* Check for a built-in function. */
7473 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7474 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7475 == FUNCTION_DECL)
7476 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7478 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7479 == BUILT_IN_FRONTEND)
7480 return (*lang_hooks.expand_expr) (exp, original_target,
7481 tmode, modifier,
7482 alt_rtl);
7483 else
7484 return expand_builtin (exp, target, subtarget, tmode, ignore);
7487 return expand_call (exp, target, ignore);
7489 case NON_LVALUE_EXPR:
7490 case NOP_EXPR:
7491 case CONVERT_EXPR:
7492 case REFERENCE_EXPR:
7493 if (TREE_OPERAND (exp, 0) == error_mark_node)
7494 return const0_rtx;
7496 if (TREE_CODE (type) == UNION_TYPE)
7498 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7500 /* If both input and output are BLKmode, this conversion isn't doing
7501 anything except possibly changing the memory attributes. */
7502 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7504 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7505 modifier);
7507 result = copy_rtx (result);
7508 set_mem_attributes (result, exp, 0);
7509 return result;
7512 if (target == 0)
7514 if (TYPE_MODE (type) != BLKmode)
7515 target = gen_reg_rtx (TYPE_MODE (type));
7516 else
7517 target = assign_temp (type, 0, 1, 1);
7520 if (GET_CODE (target) == MEM)
7521 /* Store data into beginning of memory target. */
7522 store_expr (TREE_OPERAND (exp, 0),
7523 adjust_address (target, TYPE_MODE (valtype), 0),
7524 modifier == EXPAND_STACK_PARM ? 2 : 0);
7526 else if (GET_CODE (target) == REG)
7527 /* Store this field into a union of the proper type. */
7528 store_field (target,
7529 MIN ((int_size_in_bytes (TREE_TYPE
7530 (TREE_OPERAND (exp, 0)))
7531 * BITS_PER_UNIT),
7532 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7533 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7534 VOIDmode, 0, type, 0);
7535 else
7536 abort ();
7538 /* Return the entire union. */
7539 return target;
7542 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7544 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7545 modifier);
7547 /* If the signedness of the conversion differs and OP0 is
7548 a promoted SUBREG, clear that indication since we now
7549 have to do the proper extension. */
7550 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7551 && GET_CODE (op0) == SUBREG)
7552 SUBREG_PROMOTED_VAR_P (op0) = 0;
7554 return op0;
7557 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7558 if (GET_MODE (op0) == mode)
7559 return op0;
7561 /* If OP0 is a constant, just convert it into the proper mode. */
7562 if (CONSTANT_P (op0))
7564 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7565 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7567 if (modifier == EXPAND_INITIALIZER)
7568 return simplify_gen_subreg (mode, op0, inner_mode,
7569 subreg_lowpart_offset (mode,
7570 inner_mode));
7571 else
7572 return convert_modes (mode, inner_mode, op0,
7573 TREE_UNSIGNED (inner_type));
7576 if (modifier == EXPAND_INITIALIZER)
7577 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7579 if (target == 0)
7580 return
7581 convert_to_mode (mode, op0,
7582 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7583 else
7584 convert_move (target, op0,
7585 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7586 return target;
7588 case VIEW_CONVERT_EXPR:
7589 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7591 /* If the input and output modes are both the same, we are done.
7592 Otherwise, if neither mode is BLKmode and both are integral and within
7593 a word, we can use gen_lowpart. If neither is true, make sure the
7594 operand is in memory and convert the MEM to the new mode. */
7595 if (TYPE_MODE (type) == GET_MODE (op0))
7597 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7598 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7599 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7600 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7601 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7602 op0 = gen_lowpart (TYPE_MODE (type), op0);
7603 else if (GET_CODE (op0) != MEM)
7605 /* If the operand is not a MEM, force it into memory. Since we
7606 are going to be changing the mode of the MEM, don't call
7607 force_const_mem for constants because we don't allow pool
7608 constants to change mode. */
7609 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7611 if (TREE_ADDRESSABLE (exp))
7612 abort ();
7614 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7615 target
7616 = assign_stack_temp_for_type
7617 (TYPE_MODE (inner_type),
7618 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7620 emit_move_insn (target, op0);
7621 op0 = target;
7624 /* At this point, OP0 is in the correct mode. If the output type is such
7625 that the operand is known to be aligned, indicate that it is.
7626 Otherwise, we need only be concerned about alignment for non-BLKmode
7627 results. */
7628 if (GET_CODE (op0) == MEM)
7630 op0 = copy_rtx (op0);
7632 if (TYPE_ALIGN_OK (type))
7633 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7634 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7635 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7637 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7638 HOST_WIDE_INT temp_size
7639 = MAX (int_size_in_bytes (inner_type),
7640 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7641 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7642 temp_size, 0, type);
7643 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7645 if (TREE_ADDRESSABLE (exp))
7646 abort ();
7648 if (GET_MODE (op0) == BLKmode)
7649 emit_block_move (new_with_op0_mode, op0,
7650 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7651 (modifier == EXPAND_STACK_PARM
7652 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7653 else
7654 emit_move_insn (new_with_op0_mode, op0);
7656 op0 = new;
7659 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7662 return op0;
7664 case PLUS_EXPR:
7665 this_optab = ! unsignedp && flag_trapv
7666 && (GET_MODE_CLASS (mode) == MODE_INT)
7667 ? addv_optab : add_optab;
7669 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7670 something else, make sure we add the register to the constant and
7671 then to the other thing. This case can occur during strength
7672 reduction and doing it this way will produce better code if the
7673 frame pointer or argument pointer is eliminated.
7675 fold-const.c will ensure that the constant is always in the inner
7676 PLUS_EXPR, so the only case we need to do anything about is if
7677 sp, ap, or fp is our second argument, in which case we must swap
7678 the innermost first argument and our second argument. */
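/* Sketch of the swap (illustrative operands only): given

     (PLUS_EXPR (PLUS_EXPR x 4) fp)

   the X and FP operands are exchanged to give

     (PLUS_EXPR (PLUS_EXPR fp 4) x)

   so that FP + 4 can fold into a single address-like operand.  */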
7680 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7681 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7682 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7683 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7684 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7685 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7687 tree t = TREE_OPERAND (exp, 1);
7689 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7690 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7693 /* If the result is to be ptr_mode and we are adding an integer to
7694 something, we might be forming a constant. So try to use
7695 plus_constant. If it produces a sum and we can't accept it,
7696 use force_operand. This allows P = &ARR[const] to generate
7697 efficient code on machines where a SYMBOL_REF is not a valid
7698 address.
7700 If this is an EXPAND_SUM call, always return the sum. */
7701 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7702 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7704 if (modifier == EXPAND_STACK_PARM)
7705 target = 0;
7706 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7707 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7708 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7710 rtx constant_part;
7712 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7713 EXPAND_SUM);
7714 /* Use immed_double_const to ensure that the constant is
7715 truncated according to the mode of OP1, then sign extended
7716 to a HOST_WIDE_INT. Using the constant directly can result
7717 in non-canonical RTL in a 64x32 cross compile. */
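/* Concretely (illustrative value only): with a 64-bit HOST_WIDE_INT
   and a 32-bit target mode, a tree constant 0xfffffffc must become the
   canonical (const_int -4) rather than (const_int 0xfffffffc);
   immed_double_const performs exactly that truncate-then-sign-extend
   step.  */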
7718 constant_part
7719 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7720 (HOST_WIDE_INT) 0,
7721 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7722 op1 = plus_constant (op1, INTVAL (constant_part));
7723 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7724 op1 = force_operand (op1, target);
7725 return op1;
7728 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7729 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7730 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7732 rtx constant_part;
7734 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7735 (modifier == EXPAND_INITIALIZER
7736 ? EXPAND_INITIALIZER : EXPAND_SUM));
7737 if (! CONSTANT_P (op0))
7739 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7740 VOIDmode, modifier);
7741 /* Return a PLUS if modifier says it's OK. */
7742 if (modifier == EXPAND_SUM
7743 || modifier == EXPAND_INITIALIZER)
7744 return simplify_gen_binary (PLUS, mode, op0, op1);
7745 goto binop2;
7747 /* Use immed_double_const to ensure that the constant is
7748 truncated according to the mode of OP0, then sign extended
7749 to a HOST_WIDE_INT. Using the constant directly can result
7750 in non-canonical RTL in a 64x32 cross compile. */
7751 constant_part
7752 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7753 (HOST_WIDE_INT) 0,
7754 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7755 op0 = plus_constant (op0, INTVAL (constant_part));
7756 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7757 op0 = force_operand (op0, target);
7758 return op0;
7762 /* No sense saving up arithmetic to be done
7763 if it's all in the wrong mode to form part of an address.
7764 And force_operand won't know whether to sign-extend or
7765 zero-extend. */
7766 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7767 || mode != ptr_mode)
7769 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7770 subtarget, &op0, &op1, 0);
7771 if (op0 == const0_rtx)
7772 return op1;
7773 if (op1 == const0_rtx)
7774 return op0;
7775 goto binop2;
7778 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7779 subtarget, &op0, &op1, modifier);
7780 return simplify_gen_binary (PLUS, mode, op0, op1);
7782 case MINUS_EXPR:
7783 /* For initializers, we are allowed to return a MINUS of two
7784 symbolic constants. Here we handle all cases when both operands
7785 are constant. */
7786 /* Handle difference of two symbolic constants,
7787 for the sake of an initializer. */
7788 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7789 && really_constant_p (TREE_OPERAND (exp, 0))
7790 && really_constant_p (TREE_OPERAND (exp, 1)))
7792 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7793 NULL_RTX, &op0, &op1, modifier);
7795 /* If the last operand is a CONST_INT, use plus_constant of
7796 the negated constant. Else make the MINUS. */
7797 if (GET_CODE (op1) == CONST_INT)
7798 return plus_constant (op0, - INTVAL (op1));
7799 else
7800 return gen_rtx_MINUS (mode, op0, op1);
7803 this_optab = ! unsignedp && flag_trapv
7804 && (GET_MODE_CLASS(mode) == MODE_INT)
7805 ? subv_optab : sub_optab;
7807 /* No sense saving up arithmetic to be done
7808 if it's all in the wrong mode to form part of an address.
7809 And force_operand won't know whether to sign-extend or
7810 zero-extend. */
7811 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7812 || mode != ptr_mode)
7813 goto binop;
7815 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7816 subtarget, &op0, &op1, modifier);
7818 /* Convert A - const to A + (-const). */
7819 if (GET_CODE (op1) == CONST_INT)
7821 op1 = negate_rtx (mode, op1);
7822 return simplify_gen_binary (PLUS, mode, op0, op1);
7825 goto binop2;
7827 case MULT_EXPR:
7828 /* If the first operand is constant, swap them.
7829 Thus the following special-case checks need only
7830 check the second operand. */
7831 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7833 tree t1 = TREE_OPERAND (exp, 0);
7834 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7835 TREE_OPERAND (exp, 1) = t1;
7838 /* Attempt to return something suitable for generating an
7839 indexed address, for machines that support that. */
7841 if (modifier == EXPAND_SUM && mode == ptr_mode
7842 && host_integerp (TREE_OPERAND (exp, 1), 0))
7844 tree exp1 = TREE_OPERAND (exp, 1);
7846 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7847 EXPAND_SUM);
7849 if (GET_CODE (op0) != REG)
7850 op0 = force_operand (op0, NULL_RTX);
7851 if (GET_CODE (op0) != REG)
7852 op0 = copy_to_mode_reg (mode, op0);
7854 return gen_rtx_MULT (mode, op0,
7855 gen_int_mode (tree_low_cst (exp1, 0),
7856 TYPE_MODE (TREE_TYPE (exp1))));
7859 if (modifier == EXPAND_STACK_PARM)
7860 target = 0;
7862 /* Check for multiplying things that have been extended
7863 from a narrower type. If this machine supports multiplying
7864 in that narrower type with a result in the desired type,
7865 do it that way, and avoid the explicit type-conversion. */
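/* Illustrative source (assuming the target provides a widening
   multiply pattern such as mulhisi3):

     int f (short a, short b) { return a * b; }

   Both operands arrive as NOP_EXPRs widening HImode to SImode; the
   code below can then emit a single HImode x HImode -> SImode multiply
   instead of two extensions followed by a full SImode multiply.  */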
7866 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7867 && TREE_CODE (type) == INTEGER_TYPE
7868 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7869 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7870 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7871 && int_fits_type_p (TREE_OPERAND (exp, 1),
7872 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7873 /* Don't use a widening multiply if a shift will do. */
7874 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7875 > HOST_BITS_PER_WIDE_INT)
7876 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7878 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7879 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7881 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7882 /* If both operands are extended, they must either both
7883 be zero-extended or both be sign-extended. */
7884 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7886 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7888 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7889 enum machine_mode innermode = TYPE_MODE (op0type);
7890 bool zextend_p = TREE_UNSIGNED (op0type);
7891 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7892 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7894 if (mode == GET_MODE_WIDER_MODE (innermode))
7896 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7898 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7899 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7900 TREE_OPERAND (exp, 1),
7901 NULL_RTX, &op0, &op1, 0);
7902 else
7903 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7904 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7905 NULL_RTX, &op0, &op1, 0);
7906 goto binop2;
7908 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7909 && innermode == word_mode)
7911 rtx htem, hipart;
7912 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7913 NULL_RTX, VOIDmode, 0);
7914 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7915 op1 = convert_modes (innermode, mode,
7916 expand_expr (TREE_OPERAND (exp, 1),
7917 NULL_RTX, VOIDmode, 0),
7918 unsignedp);
7919 else
7920 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7921 NULL_RTX, VOIDmode, 0);
7922 temp = expand_binop (mode, other_optab, op0, op1, target,
7923 unsignedp, OPTAB_LIB_WIDEN);
7924 hipart = gen_highpart (innermode, temp);
7925 htem = expand_mult_highpart_adjust (innermode, hipart,
7926 op0, op1, hipart,
7927 zextend_p);
7928 if (htem != hipart)
7929 emit_move_insn (hipart, htem);
7930 return temp;
7934 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7935 subtarget, &op0, &op1, 0);
7936 return expand_mult (mode, op0, op1, target, unsignedp);
7938 case TRUNC_DIV_EXPR:
7939 case FLOOR_DIV_EXPR:
7940 case CEIL_DIV_EXPR:
7941 case ROUND_DIV_EXPR:
7942 case EXACT_DIV_EXPR:
7943 if (modifier == EXPAND_STACK_PARM)
7944 target = 0;
7945 /* Possible optimization: compute the dividend with EXPAND_SUM
7946 then if the divisor is constant can optimize the case
7947 where some terms of the dividend have coeffs divisible by it. */
7948 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7949 subtarget, &op0, &op1, 0);
7950 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7952 case RDIV_EXPR:
7953 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7954 saving an expensive divide. If not, combine will rebuild the
7955 original computation. */
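/* For example, under -funsafe-math-optimizations

     double f (double a, double b, double c) { return a / c + b / c; }

   expands both divisions as multiplications by (1.0 / c), letting CSE
   keep a single reciprocal and a single real division.  */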
7956 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7957 && TREE_CODE (type) == REAL_TYPE
7958 && !real_onep (TREE_OPERAND (exp, 0)))
7959 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7960 build (RDIV_EXPR, type,
7961 build_real (type, dconst1),
7962 TREE_OPERAND (exp, 1))),
7963 target, tmode, modifier);
7964 this_optab = sdiv_optab;
7965 goto binop;
7967 case TRUNC_MOD_EXPR:
7968 case FLOOR_MOD_EXPR:
7969 case CEIL_MOD_EXPR:
7970 case ROUND_MOD_EXPR:
7971 if (modifier == EXPAND_STACK_PARM)
7972 target = 0;
7973 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7974 subtarget, &op0, &op1, 0);
7975 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7977 case FIX_ROUND_EXPR:
7978 case FIX_FLOOR_EXPR:
7979 case FIX_CEIL_EXPR:
7980 abort (); /* Not used for C. */
7982 case FIX_TRUNC_EXPR:
7983 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7984 if (target == 0 || modifier == EXPAND_STACK_PARM)
7985 target = gen_reg_rtx (mode);
7986 expand_fix (target, op0, unsignedp);
7987 return target;
7989 case FLOAT_EXPR:
7990 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7991 if (target == 0 || modifier == EXPAND_STACK_PARM)
7992 target = gen_reg_rtx (mode);
7993 /* expand_float can't figure out what to do if FROM has VOIDmode.
7994 So give it the correct mode. With -O, cse will optimize this. */
7995 if (GET_MODE (op0) == VOIDmode)
7996 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7997 op0);
7998 expand_float (target, op0,
7999 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8000 return target;
8002 case NEGATE_EXPR:
8003 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8004 if (modifier == EXPAND_STACK_PARM)
8005 target = 0;
8006 temp = expand_unop (mode,
8007 ! unsignedp && flag_trapv
8008 && (GET_MODE_CLASS(mode) == MODE_INT)
8009 ? negv_optab : neg_optab, op0, target, 0);
8010 if (temp == 0)
8011 abort ();
8012 return temp;
8014 case ABS_EXPR:
8015 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8016 if (modifier == EXPAND_STACK_PARM)
8017 target = 0;
8019 /* ABS_EXPR is not valid for complex arguments. */
8020 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8021 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8022 abort ();
8024 /* Unsigned abs is simply the operand. Testing here means we don't
8025 risk generating incorrect code below. */
8026 if (TREE_UNSIGNED (type))
8027 return op0;
8029 return expand_abs (mode, op0, target, unsignedp,
8030 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8032 case MAX_EXPR:
8033 case MIN_EXPR:
8034 target = original_target;
8035 if (target == 0
8036 || modifier == EXPAND_STACK_PARM
8037 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8038 || GET_MODE (target) != mode
8039 || (GET_CODE (target) == REG
8040 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8041 target = gen_reg_rtx (mode);
8042 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8043 target, &op0, &op1, 0);
8045 /* First try to do it with a special MIN or MAX instruction.
8046 If that does not win, use a conditional jump to select the proper
8047 value. */
8048 this_optab = (unsignedp
8049 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8050 : (code == MIN_EXPR ? smin_optab : smax_optab));
8052 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8053 OPTAB_WIDEN);
8054 if (temp != 0)
8055 return temp;
8057 /* At this point, a MEM target is no longer useful; we will get better
8058 code without it. */
8060 if (GET_CODE (target) == MEM)
8061 target = gen_reg_rtx (mode);
8063 /* If op1 was placed in target, swap op0 and op1. */
8064 if (target != op0 && target == op1)
8066 rtx tem = op0;
8067 op0 = op1;
8068 op1 = tem;
8071 if (target != op0)
8072 emit_move_insn (target, op0);
8074 op0 = gen_label_rtx ();
8076 /* If this mode is an integer too wide to compare properly,
8077 compare word by word. Rely on cse to optimize constant cases. */
8078 if (GET_MODE_CLASS (mode) == MODE_INT
8079 && ! can_compare_p (GE, mode, ccp_jump))
8081 if (code == MAX_EXPR)
8082 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8083 NULL_RTX, op0);
8084 else
8085 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8086 NULL_RTX, op0);
8088 else
8090 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8091 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
8093 emit_move_insn (target, op1);
8094 emit_label (op0);
8095 return target;
8097 case BIT_NOT_EXPR:
8098 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8099 if (modifier == EXPAND_STACK_PARM)
8100 target = 0;
8101 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8102 if (temp == 0)
8103 abort ();
8104 return temp;
8106 /* ??? Can optimize bitwise operations with one arg constant.
8107 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8108 and (a bitwise1 b) bitwise2 b (etc)
8109 but that is probably not worthwhile. */
8111 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8112 boolean values when we want in all cases to compute both of them. In
8113 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8114 as actual zero-or-1 values and then bitwise anding. In cases where
8115 there cannot be any side effects, better code would be made by
8116 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8117 how to recognize those cases. */
8119 case TRUTH_AND_EXPR:
8120 case BIT_AND_EXPR:
8121 this_optab = and_optab;
8122 goto binop;
8124 case TRUTH_OR_EXPR:
8125 case BIT_IOR_EXPR:
8126 this_optab = ior_optab;
8127 goto binop;
8129 case TRUTH_XOR_EXPR:
8130 case BIT_XOR_EXPR:
8131 this_optab = xor_optab;
8132 goto binop;
8134 case LSHIFT_EXPR:
8135 case RSHIFT_EXPR:
8136 case LROTATE_EXPR:
8137 case RROTATE_EXPR:
8138 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8139 subtarget = 0;
8140 if (modifier == EXPAND_STACK_PARM)
8141 target = 0;
8142 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8143 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8144 unsignedp);
8146 /* Could determine the answer when only additive constants differ. Also,
8147 the addition of one can be handled by changing the condition. */
8148 case LT_EXPR:
8149 case LE_EXPR:
8150 case GT_EXPR:
8151 case GE_EXPR:
8152 case EQ_EXPR:
8153 case NE_EXPR:
8154 case UNORDERED_EXPR:
8155 case ORDERED_EXPR:
8156 case UNLT_EXPR:
8157 case UNLE_EXPR:
8158 case UNGT_EXPR:
8159 case UNGE_EXPR:
8160 case UNEQ_EXPR:
8161 temp = do_store_flag (exp,
8162 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8163 tmode != VOIDmode ? tmode : mode, 0);
8164 if (temp != 0)
8165 return temp;
8167 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8168 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8169 && original_target
8170 && GET_CODE (original_target) == REG
8171 && (GET_MODE (original_target)
8172 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8174 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8175 VOIDmode, 0);
8177 /* If temp is constant, we can just compute the result. */
8178 if (GET_CODE (temp) == CONST_INT)
8180 if (INTVAL (temp) != 0)
8181 emit_move_insn (target, const1_rtx);
8182 else
8183 emit_move_insn (target, const0_rtx);
8185 return target;
8188 if (temp != original_target)
8190 enum machine_mode mode1 = GET_MODE (temp);
8191 if (mode1 == VOIDmode)
8192 mode1 = tmode != VOIDmode ? tmode : mode;
8194 temp = copy_to_mode_reg (mode1, temp);
8197 op1 = gen_label_rtx ();
8198 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8199 GET_MODE (temp), unsignedp, op1);
8200 emit_move_insn (temp, const1_rtx);
8201 emit_label (op1);
8202 return temp;
8205 /* If no set-flag instruction, must generate a conditional
8206 store into a temporary variable. Drop through
8207 and handle this like && and ||. */
8209 case TRUTH_ANDIF_EXPR:
8210 case TRUTH_ORIF_EXPR:
8211 if (! ignore
8212 && (target == 0
8213 || modifier == EXPAND_STACK_PARM
8214 || ! safe_from_p (target, exp, 1)
8215 /* Make sure we don't have a hard reg (such as function's return
8216 value) live across basic blocks, if not optimizing. */
8217 || (!optimize && GET_CODE (target) == REG
8218 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8219 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8221 if (target)
8222 emit_clr_insn (target);
8224 op1 = gen_label_rtx ();
8225 jumpifnot (exp, op1);
8227 if (target)
8228 emit_0_to_1_insn (target);
8230 emit_label (op1);
8231 return ignore ? const0_rtx : target;
8233 case TRUTH_NOT_EXPR:
8234 if (modifier == EXPAND_STACK_PARM)
8235 target = 0;
8236 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8237 /* The parser is careful to generate TRUTH_NOT_EXPR
8238 only with operands that are always zero or one. */
8239 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8240 target, 1, OPTAB_LIB_WIDEN);
8241 if (temp == 0)
8242 abort ();
8243 return temp;
8245 case COMPOUND_EXPR:
8246 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8247 emit_queue ();
8248 return expand_expr_real (TREE_OPERAND (exp, 1),
8249 (ignore ? const0_rtx : target),
8250 VOIDmode, modifier, alt_rtl);
8252 case COND_EXPR:
8253 /* If we would have a "singleton" (see below) were it not for a
8254 conversion in each arm, bring that conversion back out. */
8255 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8256 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8257 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8258 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8260 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8261 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8263 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8264 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8265 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8266 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8267 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8268 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8269 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8270 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8271 return expand_expr (build1 (NOP_EXPR, type,
8272 build (COND_EXPR, TREE_TYPE (iftrue),
8273 TREE_OPERAND (exp, 0),
8274 iftrue, iffalse)),
8275 target, tmode, modifier);
8279 /* Note that COND_EXPRs whose type is a structure or union
8280 are required to be constructed to contain assignments of
8281 a temporary variable, so that we can evaluate them here
8282 for side effect only. If type is void, we must do likewise. */
8284 /* If an arm of the branch requires a cleanup,
8285 only that cleanup is performed. */
8287 tree singleton = 0;
8288 tree binary_op = 0, unary_op = 0;
8290 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8291 convert it to our mode, if necessary. */
8292 if (integer_onep (TREE_OPERAND (exp, 1))
8293 && integer_zerop (TREE_OPERAND (exp, 2))
8294 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8296 if (ignore)
8298 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8299 modifier);
8300 return const0_rtx;
8303 if (modifier == EXPAND_STACK_PARM)
8304 target = 0;
8305 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8306 if (GET_MODE (op0) == mode)
8307 return op0;
8309 if (target == 0)
8310 target = gen_reg_rtx (mode);
8311 convert_move (target, op0, unsignedp);
8312 return target;
8315 /* Check for X ? A + B : A. If we have this, we can copy A to the
8316 output and conditionally add B. Similarly for unary operations.
8317 Don't do this if X has side-effects because those side effects
8318 might affect A or B and the "?" operation is a sequence point in
8319 ANSI. (operand_equal_p tests for side effects.) */
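/* For instance, "x ? a + b : a" can be expanded as: copy A into the
   target, then add B to it only on the X-is-true path; the unary
   analogue "x ? -a : a" copies A and conditionally negates it.  */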
8321 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8322 && operand_equal_p (TREE_OPERAND (exp, 2),
8323 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8324 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8325 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8326 && operand_equal_p (TREE_OPERAND (exp, 1),
8327 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8328 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8329 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8330 && operand_equal_p (TREE_OPERAND (exp, 2),
8331 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8332 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8333 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8334 && operand_equal_p (TREE_OPERAND (exp, 1),
8335 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8336 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8338 /* If we are not to produce a result, we have no target. Otherwise,
8339 if a target was specified use it; it will not be used as an
8340 intermediate target unless it is safe. If no target, use a
8341 temporary. */
8343 if (ignore)
8344 temp = 0;
8345 else if (modifier == EXPAND_STACK_PARM)
8346 temp = assign_temp (type, 0, 0, 1);
8347 else if (original_target
8348 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8349 || (singleton && GET_CODE (original_target) == REG
8350 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8351 && original_target == var_rtx (singleton)))
8352 && GET_MODE (original_target) == mode
8353 #ifdef HAVE_conditional_move
8354 && (! can_conditionally_move_p (mode)
8355 || GET_CODE (original_target) == REG
8356 || TREE_ADDRESSABLE (type))
8357 #endif
8358 && (GET_CODE (original_target) != MEM
8359 || TREE_ADDRESSABLE (type)))
8360 temp = original_target;
8361 else if (TREE_ADDRESSABLE (type))
8362 abort ();
8363 else
8364 temp = assign_temp (type, 0, 0, 1);
8366 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8367 do the test of X as a store-flag operation, do this as
8368 A + ((X != 0) << log C). Similarly for other simple binary
8369 operators. Only do for C == 1 if BRANCH_COST is low. */
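/* Illustrative case:

     int f (int x, int a) { return x ? a + 4 : a; }

   can be computed branch-free as a + ((x != 0) << 2), using a
   store-flag for (x != 0).  When BRANCH_COST is low (< 3) only the
   C == 1 form, where the shift disappears, is considered a win.  */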
8370 if (temp && singleton && binary_op
8371 && (TREE_CODE (binary_op) == PLUS_EXPR
8372 || TREE_CODE (binary_op) == MINUS_EXPR
8373 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8374 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8375 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8376 : integer_onep (TREE_OPERAND (binary_op, 1)))
8377 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8379 rtx result;
8380 tree cond;
8381 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8382 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8383 ? addv_optab : add_optab)
8384 : TREE_CODE (binary_op) == MINUS_EXPR
8385 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8386 ? subv_optab : sub_optab)
8387 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8388 : xor_optab);
8390 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8391 if (singleton == TREE_OPERAND (exp, 1))
8392 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8393 else
8394 cond = TREE_OPERAND (exp, 0);
8396 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8397 ? temp : NULL_RTX),
8398 mode, BRANCH_COST <= 1);
8400 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8401 result = expand_shift (LSHIFT_EXPR, mode, result,
8402 build_int_2 (tree_log2
8403 (TREE_OPERAND
8404 (binary_op, 1)),
8406 (safe_from_p (temp, singleton, 1)
8407 ? temp : NULL_RTX), 0);
8409 if (result)
8411 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8412 return expand_binop (mode, boptab, op1, result, temp,
8413 unsignedp, OPTAB_LIB_WIDEN);
8417 do_pending_stack_adjust ();
8418 NO_DEFER_POP;
8419 op0 = gen_label_rtx ();
8421 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8423 if (temp != 0)
8425 /* If the target conflicts with the other operand of the
8426 binary op, we can't use it. Also, we can't use the target
8427 if it is a hard register, because evaluating the condition
8428 might clobber it. */
8429 if ((binary_op
8430 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8431 || (GET_CODE (temp) == REG
8432 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8433 temp = gen_reg_rtx (mode);
8434 store_expr (singleton, temp,
8435 modifier == EXPAND_STACK_PARM ? 2 : 0);
8437 else
8438 expand_expr (singleton,
8439 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8440 if (singleton == TREE_OPERAND (exp, 1))
8441 jumpif (TREE_OPERAND (exp, 0), op0);
8442 else
8443 jumpifnot (TREE_OPERAND (exp, 0), op0);
8445 start_cleanup_deferral ();
8446 if (binary_op && temp == 0)
8447 /* Just touch the other operand. */
8448 expand_expr (TREE_OPERAND (binary_op, 1),
8449 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8450 else if (binary_op)
8451 store_expr (build (TREE_CODE (binary_op), type,
8452 make_tree (type, temp),
8453 TREE_OPERAND (binary_op, 1)),
8454 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8455 else
8456 store_expr (build1 (TREE_CODE (unary_op), type,
8457 make_tree (type, temp)),
8458 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8459 op1 = op0;
8461 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8462 comparison operator. If we have one of these cases, set the
8463 output to A, branch on A (cse will merge these two references),
8464 then set the output to FOO. */
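/* E.g. for "x > 0 ? x : 0": X is stored into the output first, the
   branch tests that same output (cse merges the two uses of X), and
   only the fall-through path then overwrites the output with 0.  */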
8465 else if (temp
8466 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8467 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8468 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8469 TREE_OPERAND (exp, 1), 0)
8470 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8471 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8472 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8474 if (GET_CODE (temp) == REG
8475 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8476 temp = gen_reg_rtx (mode);
8477 store_expr (TREE_OPERAND (exp, 1), temp,
8478 modifier == EXPAND_STACK_PARM ? 2 : 0);
8479 jumpif (TREE_OPERAND (exp, 0), op0);
8481 start_cleanup_deferral ();
8482 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8483 store_expr (TREE_OPERAND (exp, 2), temp,
8484 modifier == EXPAND_STACK_PARM ? 2 : 0);
8485 else
8486 expand_expr (TREE_OPERAND (exp, 2),
8487 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8488 op1 = op0;
8490 else if (temp
8491 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8492 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8493 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8494 TREE_OPERAND (exp, 2), 0)
8495 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8496 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8497 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8499 if (GET_CODE (temp) == REG
8500 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8501 temp = gen_reg_rtx (mode);
8502 store_expr (TREE_OPERAND (exp, 2), temp,
8503 modifier == EXPAND_STACK_PARM ? 2 : 0);
8504 jumpifnot (TREE_OPERAND (exp, 0), op0);
8506 start_cleanup_deferral ();
8507 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8508 store_expr (TREE_OPERAND (exp, 1), temp,
8509 modifier == EXPAND_STACK_PARM ? 2 : 0);
8510 else
8511 expand_expr (TREE_OPERAND (exp, 1),
8512 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8513 op1 = op0;
8515 else
8517 op1 = gen_label_rtx ();
8518 jumpifnot (TREE_OPERAND (exp, 0), op0);
8520 start_cleanup_deferral ();
8522 /* One branch of the cond can be void, if it never returns. For
8523 example A ? throw : E */
8524 if (temp != 0
8525 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8526 store_expr (TREE_OPERAND (exp, 1), temp,
8527 modifier == EXPAND_STACK_PARM ? 2 : 0);
8528 else
8529 expand_expr (TREE_OPERAND (exp, 1),
8530 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8531 end_cleanup_deferral ();
8532 emit_queue ();
8533 emit_jump_insn (gen_jump (op1));
8534 emit_barrier ();
8535 emit_label (op0);
8536 start_cleanup_deferral ();
8537 if (temp != 0
8538 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8539 store_expr (TREE_OPERAND (exp, 2), temp,
8540 modifier == EXPAND_STACK_PARM ? 2 : 0);
8541 else
8542 expand_expr (TREE_OPERAND (exp, 2),
8543 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8546 end_cleanup_deferral ();
8548 emit_queue ();
8549 emit_label (op1);
8550 OK_DEFER_POP;
8552 return temp;
8555 case TARGET_EXPR:
8557 /* Something needs to be initialized, but we didn't know
8558 where that thing was when building the tree. For example,
8559 it could be the return value of a function, or a parameter
8560 to a function which is laid out on the stack, or a temporary
8561 variable which must be passed by reference.
8563 We guarantee that the expression will either be constructed
8564 or copied into our original target. */
8566 tree slot = TREE_OPERAND (exp, 0);
8567 tree cleanups = NULL_TREE;
8568 tree exp1;
8570 if (TREE_CODE (slot) != VAR_DECL)
8571 abort ();
8573 if (! ignore)
8574 target = original_target;
8576 /* Set this here so that if we get a target that refers to a
8577 register variable that's already been used, put_reg_into_stack
8578 knows that it should fix up those uses. */
8579 TREE_USED (slot) = 1;
8581 if (target == 0)
8583 if (DECL_RTL_SET_P (slot))
8585 target = DECL_RTL (slot);
8586 /* If we have already expanded the slot, don't do
8587 it again. (mrs) */
8588 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8589 return target;
8591 else
8593 target = assign_temp (type, 2, 0, 1);
8594 /* All temp slots at this level must not conflict. */
8595 preserve_temp_slots (target);
8596 SET_DECL_RTL (slot, target);
8597 if (TREE_ADDRESSABLE (slot))
8598 put_var_into_stack (slot, /*rescan=*/false);
8600 /* Since SLOT is not known to the called function
8601 to belong to its stack frame, we must build an explicit
8602 cleanup. This case occurs when we must build up a reference
8603 to pass the reference as an argument. In this case,
8604 it is very likely that such a reference need not be
8605 built here. */
8607 if (TREE_OPERAND (exp, 2) == 0)
8608 TREE_OPERAND (exp, 2)
8609 = (*lang_hooks.maybe_build_cleanup) (slot);
8610 cleanups = TREE_OPERAND (exp, 2);
8613 else
8615 /* This case does occur when expanding a parameter which
8616 needs to be constructed on the stack. The target
8617 is the actual stack address that we want to initialize.
8618 The function we call will perform the cleanup in this case. */
8620 /* If we have already assigned it space, use that space,
8621 not the target that we were passed in, as our target
8622 parameter is only a hint. */
8623 if (DECL_RTL_SET_P (slot))
8625 target = DECL_RTL (slot);
8626 /* If we have already expanded the slot, don't do
8627 it again. (mrs) */
8628 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8629 return target;
8631 else
8633 SET_DECL_RTL (slot, target);
8634 /* If we must have an addressable slot, then make sure that
8635 the RTL that we just stored in slot is OK. */
8636 if (TREE_ADDRESSABLE (slot))
8637 put_var_into_stack (slot, /*rescan=*/true);
8641 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8642 /* Mark it as expanded. */
8643 TREE_OPERAND (exp, 1) = NULL_TREE;
8645 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8647 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8649 return target;
8652 case INIT_EXPR:
8654 tree lhs = TREE_OPERAND (exp, 0);
8655 tree rhs = TREE_OPERAND (exp, 1);
8657 temp = expand_assignment (lhs, rhs, ! ignore);
8658 return temp;
8661 case MODIFY_EXPR:
8663 /* If lhs is complex, expand calls in rhs before computing it.
8664 That's so we don't compute a pointer and save it over a
8665 call. If lhs is simple, compute it first so we can give it
8666 as a target if the rhs is just a call. This avoids an
8667 extra temp and copy and prevents a partial-subsumption
8668 which makes bad code. Actually we could treat
8669 component_ref's of vars like vars. */
8671 tree lhs = TREE_OPERAND (exp, 0);
8672 tree rhs = TREE_OPERAND (exp, 1);
8674 temp = 0;
8676 /* Check for |= or &= of a bitfield of size one into another bitfield
8677 of size 1. In this case, (unless we need the result of the
8678 assignment) we can do this more efficiently with a
8679 test followed by an assignment, if necessary.
8681 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8682 things change so we do, this code should be enhanced to
8683 support it. */
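/* Illustrative source (one-bit bitfields, result unused):

     struct s { unsigned a : 1, b : 1; };
     void f (struct s *p) { p->a |= p->b; }

   is emitted roughly as "if (p->b) p->a = 1;": B is tested with a
   jump and the store into A is performed only when needed.  */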
8684 if (ignore
8685 && TREE_CODE (lhs) == COMPONENT_REF
8686 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8687 || TREE_CODE (rhs) == BIT_AND_EXPR)
8688 && TREE_OPERAND (rhs, 0) == lhs
8689 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8690 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8691 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8693 rtx label = gen_label_rtx ();
8695 do_jump (TREE_OPERAND (rhs, 1),
8696 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8697 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8698 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8699 (TREE_CODE (rhs) == BIT_IOR_EXPR
8700 ? integer_one_node
8701 : integer_zero_node)),
8703 do_pending_stack_adjust ();
8704 emit_label (label);
8705 return const0_rtx;
8708 temp = expand_assignment (lhs, rhs, ! ignore);
8710 return temp;
8713 case RETURN_EXPR:
8714 if (!TREE_OPERAND (exp, 0))
8715 expand_null_return ();
8716 else
8717 expand_return (TREE_OPERAND (exp, 0));
8718 return const0_rtx;
8720 case PREINCREMENT_EXPR:
8721 case PREDECREMENT_EXPR:
8722 return expand_increment (exp, 0, ignore);
8724 case POSTINCREMENT_EXPR:
8725 case POSTDECREMENT_EXPR:
8726 /* Faster to treat as pre-increment if result is not used. */
8727 return expand_increment (exp, ! ignore, ignore);
8729 case ADDR_EXPR:
8730 if (modifier == EXPAND_STACK_PARM)
8731 target = 0;
8732 /* Are we taking the address of a nested function? */
8733 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8734 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8735 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8736 && ! TREE_STATIC (exp))
8738 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8739 op0 = force_operand (op0, target);
8741 /* If we are taking the address of something erroneous, just
8742 return a zero. */
8743 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8744 return const0_rtx;
8745 /* If we are taking the address of a constant and are at the
8746 top level, we have to use output_constant_def since we can't
8747 call force_const_mem at top level. */
8748 else if (cfun == 0
8749 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8750 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8751 == 'c')))
8752 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8753 else
8755 /* We make sure to pass const0_rtx down if we came in with
8756 ignore set, to avoid doing the cleanups twice for something. */
8757 op0 = expand_expr (TREE_OPERAND (exp, 0),
8758 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8759 (modifier == EXPAND_INITIALIZER
8760 ? modifier : EXPAND_CONST_ADDRESS));
8762 /* If we are going to ignore the result, OP0 will have been set
8763 to const0_rtx, so just return it. Don't get confused and
8764 think we are taking the address of the constant. */
8765 if (ignore)
8766 return op0;
8768 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8769 clever and return a REG when given a MEM. */
8770 op0 = protect_from_queue (op0, 1);
8772 /* We would like the object in memory. If it is a constant, we can
8773 have it be statically allocated into memory. For a non-constant,
8774 we need to allocate some memory and store the value into it. */
8776 if (CONSTANT_P (op0))
8777 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8778 op0);
8779 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8780 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8781 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8783 /* If the operand is a SAVE_EXPR, we can deal with this by
8784 forcing the SAVE_EXPR into memory. */
8785 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8787 put_var_into_stack (TREE_OPERAND (exp, 0),
8788 /*rescan=*/true);
8789 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8791 else
8793 /* If this object is in a register, it can't be BLKmode. */
8794 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8795 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8797 if (GET_CODE (op0) == PARALLEL)
8798 /* Handle calls that pass values in multiple
8799 non-contiguous locations. The Irix 6 ABI has examples
8800 of this. */
8801 emit_group_store (memloc, op0, inner_type,
8802 int_size_in_bytes (inner_type));
8803 else
8804 emit_move_insn (memloc, op0);
8806 op0 = memloc;
8810 if (GET_CODE (op0) != MEM)
8811 abort ();
8813 mark_temp_addr_taken (op0);
8814 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8816 op0 = XEXP (op0, 0);
8817 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8818 op0 = convert_memory_address (ptr_mode, op0);
8819 return op0;
8822 /* If OP0 is not aligned at least as much as the type requires, we
8823 need to make a temporary, copy OP0 to it, and take the address of
8824 the temporary. We want to use the alignment of the type, not of
8825 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8826 the test for BLKmode means that can't happen. The test for
8827 BLKmode is because we never make mis-aligned MEMs with
8828 non-BLKmode.
8830 We don't need to do this at all if the machine doesn't have
8831 strict alignment. */
8832 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8833 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8834 > MEM_ALIGN (op0))
8835 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8837 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8838 rtx new;
8840 if (TYPE_ALIGN_OK (inner_type))
8841 abort ();
8843 if (TREE_ADDRESSABLE (inner_type))
8845 /* We can't make a bitwise copy of this object, so fail. */
8846 error ("cannot take the address of an unaligned member");
8847 return const0_rtx;
8850 new = assign_stack_temp_for_type
8851 (TYPE_MODE (inner_type),
8852 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8853 : int_size_in_bytes (inner_type),
8854 1, build_qualified_type (inner_type,
8855 (TYPE_QUALS (inner_type)
8856 | TYPE_QUAL_CONST)));
8858 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8859 (modifier == EXPAND_STACK_PARM
8860 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8862 op0 = new;
8865 op0 = force_operand (XEXP (op0, 0), target);
8868 if (flag_force_addr
8869 && GET_CODE (op0) != REG
8870 && modifier != EXPAND_CONST_ADDRESS
8871 && modifier != EXPAND_INITIALIZER
8872 && modifier != EXPAND_SUM)
8873 op0 = force_reg (Pmode, op0);
8875 if (GET_CODE (op0) == REG
8876 && ! REG_USERVAR_P (op0))
8877 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8879 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8880 op0 = convert_memory_address (ptr_mode, op0);
8882 return op0;
8884 case ENTRY_VALUE_EXPR:
8885 abort ();
8887 /* COMPLEX type for Extended Pascal & Fortran */
8888 case COMPLEX_EXPR:
8890 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8891 rtx insns;
8893 /* Get the rtx code of the operands. */
8894 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8895 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8897 if (! target)
8898 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8900 start_sequence ();
8902 /* Move the real (op0) and imaginary (op1) parts to their location. */
8903 emit_move_insn (gen_realpart (mode, target), op0);
8904 emit_move_insn (gen_imagpart (mode, target), op1);
8906 insns = get_insns ();
8907 end_sequence ();
8909 /* Complex construction should appear as a single unit. */
8910 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8911 each with a separate pseudo as destination.
8912 It's not correct for flow to treat them as a unit. */
8913 if (GET_CODE (target) != CONCAT)
8914 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8915 else
8916 emit_insn (insns);
8918 return target;
8921 case REALPART_EXPR:
8922 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8923 return gen_realpart (mode, op0);
8925 case IMAGPART_EXPR:
8926 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8927 return gen_imagpart (mode, op0);
8929 case CONJ_EXPR:
8931 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8932 rtx imag_t;
8933 rtx insns;
8935 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8937 if (! target)
8938 target = gen_reg_rtx (mode);
8940 start_sequence ();
8942 /* Store the realpart and the negated imagpart to target. */
8943 emit_move_insn (gen_realpart (partmode, target),
8944 gen_realpart (partmode, op0));
8946 imag_t = gen_imagpart (partmode, target);
8947 temp = expand_unop (partmode,
8948 ! unsignedp && flag_trapv
8949 && (GET_MODE_CLASS(partmode) == MODE_INT)
8950 ? negv_optab : neg_optab,
8951 gen_imagpart (partmode, op0), imag_t, 0);
8952 if (temp != imag_t)
8953 emit_move_insn (imag_t, temp);
8955 insns = get_insns ();
8956 end_sequence ();
8958 /* Conjugate should appear as a single unit.
8959 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8960 each with a separate pseudo as destination.
8961 It's not correct for flow to treat them as a unit. */
8962 if (GET_CODE (target) != CONCAT)
8963 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8964 else
8965 emit_insn (insns);
8967 return target;
8970 case TRY_CATCH_EXPR:
8972 tree handler = TREE_OPERAND (exp, 1);
8974 expand_eh_region_start ();
8976 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8978 expand_eh_region_end_cleanup (handler);
8980 return op0;
8983 case TRY_FINALLY_EXPR:
8985 tree try_block = TREE_OPERAND (exp, 0);
8986 tree finally_block = TREE_OPERAND (exp, 1);
8988 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8990 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8991 is not sufficient, so we cannot expand the block twice.
8992 So we play games with GOTO_SUBROUTINE_EXPR to let us
8993 expand the thing only once. */
8994 /* When not optimizing, we go ahead with this form since
8995 (1) user breakpoints operate more predictably without
8996 code duplication, and
8997 (2) we're not running any of the global optimizers
8998 that would explode in time/space with the highly
8999 connected CFG created by the indirect branching. */
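/* Roughly, the RTL emitted below behaves like the following, using
   computed-goto notation for the subroutine linkage:

       <try_block>
       return_link = &&resume; goto finally;   (the registered cleanup)
     resume:
       goto done;
     finally:
       <finally_block>
       goto *return_link;
     done:

   so FINALLY_BLOCK is emitted only once, and other cleanup paths can reach
   it with their own return addresses.  */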
9001 rtx finally_label = gen_label_rtx ();
9002 rtx done_label = gen_label_rtx ();
9003 rtx return_link = gen_reg_rtx (Pmode);
9004 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9005 (tree) finally_label, (tree) return_link);
9006 TREE_SIDE_EFFECTS (cleanup) = 1;
9008 /* Start a new binding layer that will keep track of all cleanup
9009 actions to be performed. */
9010 expand_start_bindings (2);
9011 target_temp_slot_level = temp_slot_level;
9013 expand_decl_cleanup (NULL_TREE, cleanup);
9014 op0 = expand_expr (try_block, target, tmode, modifier);
9016 preserve_temp_slots (op0);
9017 expand_end_bindings (NULL_TREE, 0, 0);
9018 emit_jump (done_label);
9019 emit_label (finally_label);
9020 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9021 emit_indirect_jump (return_link);
9022 emit_label (done_label);
9024 else
9026 expand_start_bindings (2);
9027 target_temp_slot_level = temp_slot_level;
9029 expand_decl_cleanup (NULL_TREE, finally_block);
9030 op0 = expand_expr (try_block, target, tmode, modifier);
9032 preserve_temp_slots (op0);
9033 expand_end_bindings (NULL_TREE, 0, 0);
9036 return op0;
9039 case GOTO_SUBROUTINE_EXPR:
9041 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9042 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9043 rtx return_address = gen_label_rtx ();
9044 emit_move_insn (return_link,
9045 gen_rtx_LABEL_REF (Pmode, return_address));
9046 emit_jump (subr);
9047 emit_label (return_address);
9048 return const0_rtx;
9051 case VA_ARG_EXPR:
9052 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9054 case EXC_PTR_EXPR:
9055 return get_exception_pointer (cfun);
9057 case FDESC_EXPR:
9058 /* Function descriptors are not valid except as
9059 initialization constants, and should not be expanded. */
9060 abort ();
9062 default:
9063 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier,
9064 alt_rtl);
9067 /* Here to do an ordinary binary operator, generating an instruction
9068 from the optab already placed in `this_optab'. */
9069 binop:
9070 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9071 subtarget, &op0, &op1, 0);
9072 binop2:
9073 if (modifier == EXPAND_STACK_PARM)
9074 target = 0;
9075 temp = expand_binop (mode, this_optab, op0, op1, target,
9076 unsignedp, OPTAB_LIB_WIDEN);
9077 if (temp == 0)
9078 abort ();
9079 return temp;
9082 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9083 when applied to the address of EXP produces an address known to be
9084 aligned more than BIGGEST_ALIGNMENT. */
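/* Roughly, such an offset has the shape (- (T) &EXP) & (ALIGN - 1), where
   ALIGN is a power of 2 larger than BIGGEST_ALIGNMENT, so that &EXP + OFFSET
   is &EXP rounded up to a multiple of ALIGN.  */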
9086 static int
9087 is_aligning_offset (tree offset, tree exp)
9089 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9090 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9091 || TREE_CODE (offset) == NOP_EXPR
9092 || TREE_CODE (offset) == CONVERT_EXPR
9093 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9094 offset = TREE_OPERAND (offset, 0);
9096 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9097 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9098 if (TREE_CODE (offset) != BIT_AND_EXPR
9099 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9100 || compare_tree_int (TREE_OPERAND (offset, 1),
9101 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9102 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0
9103 return 0;
9105 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9106 It must be NEGATE_EXPR. Then strip any more conversions. */
9107 offset = TREE_OPERAND (offset, 0);
9108 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9109 || TREE_CODE (offset) == NOP_EXPR
9110 || TREE_CODE (offset) == CONVERT_EXPR)
9111 offset = TREE_OPERAND (offset, 0);
9113 if (TREE_CODE (offset) != NEGATE_EXPR)
9114 return 0;
9116 offset = TREE_OPERAND (offset, 0);
9117 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9118 || TREE_CODE (offset) == NOP_EXPR
9119 || TREE_CODE (offset) == CONVERT_EXPR)
9120 offset = TREE_OPERAND (offset, 0);
9122 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9123 whose type is the same as EXP. */
9124 return (TREE_CODE (offset) == ADDR_EXPR
9125 && (TREE_OPERAND (offset, 0) == exp
9126 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9127 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9128 == TREE_TYPE (exp)))));
9131 /* Return the tree node if ARG corresponds to a string constant, or zero
9132 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9133 in bytes within the string that ARG is accessing. The type of the
9134 offset will be `sizetype'. */
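/* For example, for an argument of the form "hello" + 2 -- an ADDR_EXPR of a
   STRING_CST plus a constant -- the STRING_CST is returned and *PTR_OFFSET
   is set to 2.  */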
9136 tree
9137 string_constant (tree arg, tree *ptr_offset)
9139 STRIP_NOPS (arg);
9141 if (TREE_CODE (arg) == ADDR_EXPR
9142 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9144 *ptr_offset = size_zero_node;
9145 return TREE_OPERAND (arg, 0);
9147 else if (TREE_CODE (arg) == PLUS_EXPR)
9149 tree arg0 = TREE_OPERAND (arg, 0);
9150 tree arg1 = TREE_OPERAND (arg, 1);
9152 STRIP_NOPS (arg0);
9153 STRIP_NOPS (arg1);
9155 if (TREE_CODE (arg0) == ADDR_EXPR
9156 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9158 *ptr_offset = convert (sizetype, arg1);
9159 return TREE_OPERAND (arg0, 0);
9161 else if (TREE_CODE (arg1) == ADDR_EXPR
9162 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9164 *ptr_offset = convert (sizetype, arg0);
9165 return TREE_OPERAND (arg1, 0);
9169 return 0;
9172 /* Expand code for a post- or pre- increment or decrement
9173 and return the RTX for the result.
9174 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
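/* For example, when the value is actually used, i++ (POST is 1) yields I's
   value from before the increment, while ++i (POST is 0) yields the value
   after it.  */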
9176 static rtx
9177 expand_increment (tree exp, int post, int ignore)
9179 rtx op0, op1;
9180 rtx temp, value;
9181 tree incremented = TREE_OPERAND (exp, 0);
9182 optab this_optab = add_optab;
9183 int icode;
9184 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9185 int op0_is_copy = 0;
9186 int single_insn = 0;
9187 /* 1 means we can't store into OP0 directly,
9188 because it is a subreg narrower than a word,
9189 and we don't dare clobber the rest of the word. */
9190 int bad_subreg = 0;
9192 /* Stabilize any component ref that might need to be
9193 evaluated more than once below. */
9194 if (!post
9195 || TREE_CODE (incremented) == BIT_FIELD_REF
9196 || (TREE_CODE (incremented) == COMPONENT_REF
9197 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9198 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9199 incremented = stabilize_reference (incremented);
9200 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9201 ones into save exprs so that they don't accidentally get evaluated
9202 more than once by the code below. */
9203 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9204 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9205 incremented = save_expr (incremented);
9207 /* Compute the operands as RTX.
9208 Note whether OP0 is the actual lvalue or a copy of it:
9209 I believe it is a copy iff it is a register or subreg
9210 and insns were generated in computing it. */
9212 temp = get_last_insn ();
9213 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9215 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9216 in place but instead must do sign- or zero-extension during assignment,
9217 so we copy it into a new register and let the code below use it as
9218 a copy.
9220 Note that we can safely modify this SUBREG since it is known not to be
9221 shared (it was made by the expand_expr call above). */
9223 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9225 if (post)
9226 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9227 else
9228 bad_subreg = 1;
9230 else if (GET_CODE (op0) == SUBREG
9231 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9233 /* We cannot increment this SUBREG in place. If we are
9234 post-incrementing, get a copy of the old value. Otherwise,
9235 just mark that we cannot increment in place. */
9236 if (post)
9237 op0 = copy_to_reg (op0);
9238 else
9239 bad_subreg = 1;
9242 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9243 && temp != get_last_insn ());
9244 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9246 /* Decide whether incrementing or decrementing. */
9247 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9248 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9249 this_optab = sub_optab;
9251 /* Convert decrement by a constant into a negative increment. */
9252 if (this_optab == sub_optab
9253 && GET_CODE (op1) == CONST_INT)
9255 op1 = GEN_INT (-INTVAL (op1));
9256 this_optab = add_optab;
9259 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9260 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9262 /* For a preincrement, see if we can do this with a single instruction. */
9263 if (!post)
9265 icode = (int) this_optab->handlers[(int) mode].insn_code;
9266 if (icode != (int) CODE_FOR_nothing
9267 /* Make sure that OP0 is valid for operands 0 and 1
9268 of the insn we want to queue. */
9269 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9270 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9271 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9272 single_insn = 1;
9275 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9276 then we cannot just increment OP0. We must therefore contrive to
9277 increment the original value. Then, for postincrement, we can return
9278 OP0 since it is a copy of the old value. For preincrement, expand here
9279 unless we can do it with a single insn.
9281 Likewise if storing directly into OP0 would clobber high bits
9282 we need to preserve (bad_subreg). */
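/* For instance, when INCREMENTED is a bit-field, expand_expr extracts the
   field into a pseudo register; incrementing that pseudo would not update
   the field itself, so the tree-level assignment built below is used
   instead.  */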
9283 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9285 /* This is the easiest way to increment the value wherever it is.
9286 Problems with multiple evaluation of INCREMENTED are prevented
9287 because either (1) it is a component_ref or preincrement,
9288 in which case it was stabilized above, or (2) it is an array_ref
9289 with constant index in an array in a register, which is
9290 safe to reevaluate. */
9291 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9292 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9293 ? MINUS_EXPR : PLUS_EXPR),
9294 TREE_TYPE (exp),
9295 incremented,
9296 TREE_OPERAND (exp, 1));
9298 while (TREE_CODE (incremented) == NOP_EXPR
9299 || TREE_CODE (incremented) == CONVERT_EXPR)
9301 newexp = convert (TREE_TYPE (incremented), newexp);
9302 incremented = TREE_OPERAND (incremented, 0);
9305 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9306 return post ? op0 : temp;
9309 if (post)
9311 /* We have a true reference to the value in OP0.
9312 If there is an insn to add or subtract in this mode, queue it.
9313 Queuing the increment insn avoids the register shuffling
9314 that often results if we must increment now and first save
9315 the old value for subsequent use. */
9317 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9318 op0 = stabilize (op0);
9319 #endif
9321 icode = (int) this_optab->handlers[(int) mode].insn_code;
9322 if (icode != (int) CODE_FOR_nothing
9323 /* Make sure that OP0 is valid for operands 0 and 1
9324 of the insn we want to queue. */
9325 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9326 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9328 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9329 op1 = force_reg (mode, op1);
9331 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9333 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9335 rtx addr = (general_operand (XEXP (op0, 0), mode)
9336 ? force_reg (Pmode, XEXP (op0, 0))
9337 : copy_to_reg (XEXP (op0, 0)));
9338 rtx temp, result;
9340 op0 = replace_equiv_address (op0, addr);
9341 temp = force_reg (GET_MODE (op0), op0);
9342 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9343 op1 = force_reg (mode, op1);
9345 /* The increment queue is LIFO, thus we have to `queue'
9346 the instructions in reverse order. */
9347 enqueue_insn (op0, gen_move_insn (op0, temp));
9348 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9349 return result;
9353 /* Preincrement, or we can't increment with one simple insn. */
9354 if (post)
9355 /* Save a copy of the value before inc or dec, to return it later. */
9356 temp = value = copy_to_reg (op0);
9357 else
9358 /* Arrange to return the incremented value. */
9359 /* Copy the rtx because expand_binop will protect from the queue,
9360 and the results of that would be invalid for us to return
9361 if our caller does emit_queue before using our result. */
9362 temp = copy_rtx (value = op0);
9364 /* Increment however we can. */
9365 op1 = expand_binop (mode, this_optab, value, op1, op0,
9366 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9368 /* Make sure the value is stored into OP0. */
9369 if (op1 != op0)
9370 emit_move_insn (op0, op1);
9372 return temp;
9375 /* Generate code to calculate EXP using a store-flag instruction
9376 and return an rtx for the result. EXP is either a comparison
9377 or a TRUTH_NOT_EXPR whose operand is a comparison.
9379 If TARGET is nonzero, store the result there if convenient.
9381 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9382 cheap.
9384 Return zero if there is no suitable set-flag instruction
9385 available on this machine.
9387 Once expand_expr has been called on the arguments of the comparison,
9388 we are committed to doing the store flag, since it is not safe to
9389 re-evaluate the expression. We emit the store-flag insn by calling
9390 emit_store_flag, but only expand the arguments if we have a reason
9391 to believe that emit_store_flag will be successful. If we think that
9392 it will succeed but it doesn't, we have to simulate the store-flag with a
9393 set/jump/set sequence. */
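/* Roughly, the set/jump/set fallback mentioned above amounts to

       target = 1;  if (op0 <cond> op1) goto label;  target = 0;  label:

   with the two constants swapped when the result must be inverted.  */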
9395 static rtx
9396 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9398 enum rtx_code code;
9399 tree arg0, arg1, type;
9400 tree tem;
9401 enum machine_mode operand_mode;
9402 int invert = 0;
9403 int unsignedp;
9404 rtx op0, op1;
9405 enum insn_code icode;
9406 rtx subtarget = target;
9407 rtx result, label;
9409 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9410 result at the end. We can't simply invert the test since it would
9411 have already been inverted if it were valid. This case occurs for
9412 some floating-point comparisons. */
9414 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9415 invert = 1, exp = TREE_OPERAND (exp, 0);
9417 arg0 = TREE_OPERAND (exp, 0);
9418 arg1 = TREE_OPERAND (exp, 1);
9420 /* Don't crash if the comparison was erroneous. */
9421 if (arg0 == error_mark_node || arg1 == error_mark_node)
9422 return const0_rtx;
9424 type = TREE_TYPE (arg0);
9425 operand_mode = TYPE_MODE (type);
9426 unsignedp = TREE_UNSIGNED (type);
9428 /* We won't bother with BLKmode store-flag operations because it would mean
9429 passing a lot of information to emit_store_flag. */
9430 if (operand_mode == BLKmode)
9431 return 0;
9433 /* We won't bother with store-flag operations involving function pointers
9434 when function pointers must be canonicalized before comparisons. */
9435 #ifdef HAVE_canonicalize_funcptr_for_compare
9436 if (HAVE_canonicalize_funcptr_for_compare
9437 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9438 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9439 == FUNCTION_TYPE))
9440 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9441 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9442 == FUNCTION_TYPE))))
9443 return 0;
9444 #endif
9446 STRIP_NOPS (arg0);
9447 STRIP_NOPS (arg1);
9449 /* Get the rtx comparison code to use. We know that EXP is a comparison
9450 operation of some type. Some comparisons against 1 and -1 can be
9451 converted to comparisons with zero. Do so here so that the tests
9452 below will be aware that we have a comparison with zero. These
9453 tests will not catch constants in the first operand, but constants
9454 are rarely passed as the first operand. */
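/* For example, a signed X < 1 becomes X <= 0 and X >= 1 becomes X > 0,
   while X <= -1 becomes X < 0 and X > -1 becomes X >= 0.  */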
9456 switch (TREE_CODE (exp))
9458 case EQ_EXPR:
9459 code = EQ;
9460 break;
9461 case NE_EXPR:
9462 code = NE;
9463 break;
9464 case LT_EXPR:
9465 if (integer_onep (arg1))
9466 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9467 else
9468 code = unsignedp ? LTU : LT;
9469 break;
9470 case LE_EXPR:
9471 if (! unsignedp && integer_all_onesp (arg1))
9472 arg1 = integer_zero_node, code = LT;
9473 else
9474 code = unsignedp ? LEU : LE;
9475 break;
9476 case GT_EXPR:
9477 if (! unsignedp && integer_all_onesp (arg1))
9478 arg1 = integer_zero_node, code = GE;
9479 else
9480 code = unsignedp ? GTU : GT;
9481 break;
9482 case GE_EXPR:
9483 if (integer_onep (arg1))
9484 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9485 else
9486 code = unsignedp ? GEU : GE;
9487 break;
9489 case UNORDERED_EXPR:
9490 code = UNORDERED;
9491 break;
9492 case ORDERED_EXPR:
9493 code = ORDERED;
9494 break;
9495 case UNLT_EXPR:
9496 code = UNLT;
9497 break;
9498 case UNLE_EXPR:
9499 code = UNLE;
9500 break;
9501 case UNGT_EXPR:
9502 code = UNGT;
9503 break;
9504 case UNGE_EXPR:
9505 code = UNGE;
9506 break;
9507 case UNEQ_EXPR:
9508 code = UNEQ;
9509 break;
9511 default:
9512 abort ();
9515 /* Put a constant second. */
9516 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9518 tem = arg0; arg0 = arg1; arg1 = tem;
9519 code = swap_condition (code);
9522 /* If this is an equality or inequality test of a single bit, we can
9523 do this by shifting the bit being tested to the low-order bit and
9524 masking the result with the constant 1. If the condition was EQ,
9525 we xor it with 1. This does not require an scc insn and is faster
9526 than an scc insn even if we have it.
9528 The code to make this transformation was moved into fold_single_bit_test,
9529 so we just call into the folder and expand its result. */
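/* For example, (X & 4) != 0 becomes (X >> 2) & 1, and (X & 4) == 0
   becomes ((X >> 2) & 1) ^ 1.  */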
9531 if ((code == NE || code == EQ)
9532 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9533 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9535 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9536 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9537 arg0, arg1, type),
9538 target, VOIDmode, EXPAND_NORMAL);
9541 /* Now see if we are likely to be able to do this. Return if not. */
9542 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9543 return 0;
9545 icode = setcc_gen_code[(int) code];
9546 if (icode == CODE_FOR_nothing
9547 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9549 /* We can only do this if it is one of the special cases that
9550 can be handled without an scc insn. */
9551 if ((code == LT && integer_zerop (arg1))
9552 || (! only_cheap && code == GE && integer_zerop (arg1)))
9554 else if (BRANCH_COST >= 0
9555 && ! only_cheap && (code == NE || code == EQ)
9556 && TREE_CODE (type) != REAL_TYPE
9557 && ((abs_optab->handlers[(int) operand_mode].insn_code
9558 != CODE_FOR_nothing)
9559 || (ffs_optab->handlers[(int) operand_mode].insn_code
9560 != CODE_FOR_nothing)))
9562 else
9563 return 0;
9566 if (! get_subtarget (target)
9567 || GET_MODE (subtarget) != operand_mode)
9568 subtarget = 0;
9570 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9572 if (target == 0)
9573 target = gen_reg_rtx (mode);
9575 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9576 because, if emit_store_flag does anything, it will succeed and
9577 OP0 and OP1 will not be used subsequently. */
9579 result = emit_store_flag (target, code,
9580 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9581 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9582 operand_mode, unsignedp, 1);
9584 if (result)
9586 if (invert)
9587 result = expand_binop (mode, xor_optab, result, const1_rtx,
9588 result, 0, OPTAB_LIB_WIDEN);
9589 return result;
9592 /* If this failed, we have to do this with set/compare/jump/set code. */
9593 if (GET_CODE (target) != REG
9594 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9595 target = gen_reg_rtx (GET_MODE (target));
9597 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9598 result = compare_from_rtx (op0, op1, code, unsignedp,
9599 operand_mode, NULL_RTX);
9600 if (GET_CODE (result) == CONST_INT)
9601 return (((result == const0_rtx && ! invert)
9602 || (result != const0_rtx && invert))
9603 ? const0_rtx : const1_rtx);
9605 /* The code of RESULT may not match CODE if compare_from_rtx
9606 decided to swap its operands and reverse the original code.
9608 We know that compare_from_rtx returns either a CONST_INT or
9609 a new comparison code, so it is safe to just extract the
9610 code from RESULT. */
9611 code = GET_CODE (result);
9613 label = gen_label_rtx ();
9614 if (bcc_gen_fctn[(int) code] == 0)
9615 abort ();
9617 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9618 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9619 emit_label (label);
9621 return target;
9625 /* Stubs in case we haven't got a casesi insn. */
9626 #ifndef HAVE_casesi
9627 # define HAVE_casesi 0
9628 # define gen_casesi(a, b, c, d, e) (0)
9629 # define CODE_FOR_casesi CODE_FOR_nothing
9630 #endif
9632 /* If the machine does not have a case insn that compares the bounds,
9633 this means extra overhead for dispatch tables, which raises the
9634 threshold for using them. */
9635 #ifndef CASE_VALUES_THRESHOLD
9636 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9637 #endif /* CASE_VALUES_THRESHOLD */
9639 unsigned int
9640 case_values_threshold (void)
9642 return CASE_VALUES_THRESHOLD;
9645 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9646 0 otherwise (i.e. if there is no casesi instruction). */
9648 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9649 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9651 enum machine_mode index_mode = SImode;
9652 int index_bits = GET_MODE_BITSIZE (index_mode);
9653 rtx op1, op2, index;
9654 enum machine_mode op_mode;
9656 if (! HAVE_casesi)
9657 return 0;
9659 /* Convert the index to SImode. */
9660 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9662 enum machine_mode omode = TYPE_MODE (index_type);
9663 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9665 /* We must handle the endpoints in the original mode. */
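/* (Truncating first could let an out-of-range index wrap to an in-range
   SImode value and select the wrong table entry, so the range check is
   done in OMODE before the truncation.)  */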
9666 index_expr = build (MINUS_EXPR, index_type,
9667 index_expr, minval);
9668 minval = integer_zero_node;
9669 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9670 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9671 omode, 1, default_label);
9672 /* Now we can safely truncate. */
9673 index = convert_to_mode (index_mode, index, 0);
9675 else
9677 if (TYPE_MODE (index_type) != index_mode)
9679 index_expr = convert ((*lang_hooks.types.type_for_size)
9680 (index_bits, 0), index_expr);
9681 index_type = TREE_TYPE (index_expr);
9684 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9686 emit_queue ();
9687 index = protect_from_queue (index, 0);
9688 do_pending_stack_adjust ();
9690 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9691 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9692 (index, op_mode))
9693 index = copy_to_mode_reg (op_mode, index);
9695 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9697 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9698 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9699 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9700 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9701 (op1, op_mode))
9702 op1 = copy_to_mode_reg (op_mode, op1);
9704 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9706 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9707 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9708 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9709 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9710 (op2, op_mode))
9711 op2 = copy_to_mode_reg (op_mode, op2);
9713 emit_jump_insn (gen_casesi (index, op1, op2,
9714 table_label, default_label));
9715 return 1;
9718 /* Attempt to generate a tablejump instruction; same concept. */
9719 #ifndef HAVE_tablejump
9720 #define HAVE_tablejump 0
9721 #define gen_tablejump(x, y) (0)
9722 #endif
9724 /* Subroutine of the next function.
9726 INDEX is the value being switched on, with the lowest value
9727 in the table already subtracted.
9728 MODE is its expected mode (needed if INDEX is constant).
9729 RANGE is the length of the jump table.
9730 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9732 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9733 index value is out of range. */
9735 static void
9736 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9737 rtx default_label)
9739 rtx temp, vector;
9741 if (INTVAL (range) > cfun->max_jumptable_ents)
9742 cfun->max_jumptable_ents = INTVAL (range);
9744 /* Do an unsigned comparison (in the proper mode) between the index
9745 expression and the value which represents the length of the range.
9746 Since we just finished subtracting the lower bound of the range
9747 from the index expression, this comparison allows us to simultaneously
9748 check that the original index expression value is both greater than
9749 or equal to the minimum value of the range and less than or equal to
9750 the maximum value of the range. */
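/* For example, if the case values span [10, 15] so that RANGE is 5, an
   original index of 8 becomes (unsigned) -2 after the subtraction; that
   compares greater than 5, so the single GTU comparison below also rejects
   values below the minimum.  */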
9752 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9753 default_label);
9755 /* If index is in range, it must fit in Pmode.
9756 Convert to Pmode so we can index with it. */
9757 if (mode != Pmode)
9758 index = convert_to_mode (Pmode, index, 1);
9760 /* Don't let a MEM slip through, because then INDEX that comes
9761 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9762 and break_out_memory_refs will go to work on it and mess it up. */
9763 #ifdef PIC_CASE_VECTOR_ADDRESS
9764 if (flag_pic && GET_CODE (index) != REG)
9765 index = copy_to_mode_reg (Pmode, index);
9766 #endif
9768 /* If flag_force_addr were to affect this address
9769 it could interfere with the tricky assumptions made
9770 about addresses that contain label-refs,
9771 which may be valid only very near the tablejump itself. */
9772 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9773 GET_MODE_SIZE, because this indicates how large insns are. The other
9774 uses should all be Pmode, because they are addresses. This code
9775 could fail if addresses and insns are not the same size. */
9776 index = gen_rtx_PLUS (Pmode,
9777 gen_rtx_MULT (Pmode, index,
9778 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9779 gen_rtx_LABEL_REF (Pmode, table_label));
9780 #ifdef PIC_CASE_VECTOR_ADDRESS
9781 if (flag_pic)
9782 index = PIC_CASE_VECTOR_ADDRESS (index);
9783 else
9784 #endif
9785 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9786 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9787 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9788 RTX_UNCHANGING_P (vector) = 1;
9789 MEM_NOTRAP_P (vector) = 1;
9790 convert_move (temp, vector, 0);
9792 emit_jump_insn (gen_tablejump (temp, table_label));
9794 /* If we are generating PIC code or if the table is PC-relative, the
9795 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9796 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9797 emit_barrier ();
9801 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9802 rtx table_label, rtx default_label)
9804 rtx index;
9806 if (! HAVE_tablejump)
9807 return 0;
9809 index_expr = fold (build (MINUS_EXPR, index_type,
9810 convert (index_type, index_expr),
9811 convert (index_type, minval)));
9812 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9813 emit_queue ();
9814 index = protect_from_queue (index, 0);
9815 do_pending_stack_adjust ();
9817 do_tablejump (index, TYPE_MODE (index_type),
9818 convert_modes (TYPE_MODE (index_type),
9819 TYPE_MODE (TREE_TYPE (range)),
9820 expand_expr (range, NULL_RTX,
9821 VOIDmode, 0),
9822 TREE_UNSIGNED (TREE_TYPE (range))),
9823 table_label, default_label);
9824 return 1;
9827 /* Nonzero if the mode is a valid vector mode for this architecture.
9828 This returns nonzero even if there is no hardware support for the
9829 vector mode, but we can emulate with narrower modes. */
9832 vector_mode_valid_p (enum machine_mode mode)
9834 enum mode_class class = GET_MODE_CLASS (mode);
9835 enum machine_mode innermode;
9837 /* Doh! What's going on? */
9838 if (class != MODE_VECTOR_INT
9839 && class != MODE_VECTOR_FLOAT)
9840 return 0;
9842 /* Hardware support. Woo hoo! */
9843 if (VECTOR_MODE_SUPPORTED_P (mode))
9844 return 1;
9846 innermode = GET_MODE_INNER (mode);
9848 /* We should probably return 1 if requesting V4DI when we have no DI
9849 but do have V2DI, but this is probably very unlikely. */
9851 /* If we have support for the inner mode, we can safely emulate it.
9852 We may not have V2DI, but we can emulate with a pair of DIs. */
9853 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9856 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9857 static rtx
9858 const_vector_from_tree (tree exp)
9860 rtvec v;
9861 int units, i;
9862 tree link, elt;
9863 enum machine_mode inner, mode;
9865 mode = TYPE_MODE (TREE_TYPE (exp));
9867 if (is_zeros_p (exp))
9868 return CONST0_RTX (mode);
9870 units = GET_MODE_NUNITS (mode);
9871 inner = GET_MODE_INNER (mode);
9873 v = rtvec_alloc (units);
9875 link = TREE_VECTOR_CST_ELTS (exp);
9876 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9878 elt = TREE_VALUE (link);
9880 if (TREE_CODE (elt) == REAL_CST)
9881 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9882 inner);
9883 else
9884 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9885 TREE_INT_CST_HIGH (elt),
9886 inner);
9889 /* Initialize remaining elements to 0. */
9890 for (; i < units; ++i)
9891 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9893 return gen_rtx_raw_CONST_VECTOR (mode, v);
9896 #include "gt-expr.h"